From d044588d702b3dc2dd6b9a9e28056df19d942a09 Mon Sep 17 00:00:00 2001 From: Breno Costa <35263725+breno-costa@users.noreply.github.com> Date: Thu, 30 Jun 2022 02:11:16 +0200 Subject: [PATCH 01/73] fix: Fixing broken links to feast documentation on java readme and contribution (#2892) Signed-off-by: Breno Costa --- java/CONTRIBUTING.md | 4 ++-- java/README.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/java/CONTRIBUTING.md b/java/CONTRIBUTING.md index 86eacfef41..f6c789d984 100644 --- a/java/CONTRIBUTING.md +++ b/java/CONTRIBUTING.md @@ -1,5 +1,5 @@ # Development Guide: feast-java -> The higher level [Development Guide](https://docs.feast.dev/contributing/development-guide) +> The higher level [Development Guide](https://docs.feast.dev/v/master/project/development-guide) > gives contributing to Feast codebase as a whole. ### Overview @@ -9,7 +9,7 @@ the feast-java Repository: - [Feast Java Client](#feast-java-client) > Don't see the Feast component that you want to contribute to here? -> Check out the [Development Guide](https://docs.feast.dev/contributing/development-guide) +> Check out the [Development Guide](https://docs.feast.dev/v/master/project/development-guide) > to learn how Feast components are distributed over multiple repositories. 
#### Common Setup diff --git a/java/README.md b/java/README.md index ff5a1b8553..8c3d93628e 100644 --- a/java/README.md +++ b/java/README.md @@ -13,8 +13,8 @@ The process of ingesting data into the online store (Redis) is decoupled from th ### Contributing Guides on Contributing: -- [Contribution Process for Feast](https://docs.feast.dev/v/master/contributing/contributing) -- [Development Guide for Feast](https://docs.feast.dev/contributing/development-guide) +- [Contribution Process for Feast](https://docs.feast.dev/v/master/project/contributing) +- [Development Guide for Feast](https://docs.feast.dev/v/master/project/development-guide) - [Development Guide for feast-java (this repository)](CONTRIBUTING.md) ### Installing using Helm From 80ea7a93a9d7ea19f9a1218430e008a33eb6d788 Mon Sep 17 00:00:00 2001 From: Breno Costa <35263725+breno-costa@users.noreply.github.com> Date: Thu, 30 Jun 2022 03:40:15 +0200 Subject: [PATCH 02/73] fix: Change numpy version on setup.py and upgrade it to resolve dependabot warning (#2887) * Upgrade numpy version on setup.py and resolve dependabot warning Signed-off-by: Breno Costa * Upgrade python base image from 3.7 to 3.8 Signed-off-by: Breno Costa --- .../src/test/resources/docker-compose/feast10/Dockerfile | 2 +- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.10-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-requirements.txt | 2 +- setup.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/java/serving/src/test/resources/docker-compose/feast10/Dockerfile b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile index dc26c804a9..dee7dcf84c 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/Dockerfile +++ b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile @@ 
-1,4 +1,4 @@ -FROM python:3.7 +FROM python:3.8 WORKDIR /usr/src/ diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 3bdf468bb2..bcd1095d07 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -384,7 +384,7 @@ nbformat==5.4.0 # via great-expectations nodeenv==1.6.0 # via pre-commit -numpy==1.21.6 +numpy==1.22.0 # via # altair # feast (setup.py) diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 15ee46aff5..dc60ea1e70 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -89,7 +89,7 @@ mypy==0.961 # via sqlalchemy mypy-extensions==0.4.3 # via mypy -numpy==1.21.6 +numpy==1.22.0 # via # feast (setup.py) # pandas diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index dd21fae0a2..2be1e80464 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -390,7 +390,7 @@ nbformat==5.4.0 # via great-expectations nodeenv==1.6.0 # via pre-commit -numpy==1.21.6 +numpy==1.22.0 # via # altair # feast (setup.py) diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 7756acad31..8ba9f4c35d 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -91,7 +91,7 @@ mypy==0.961 # via sqlalchemy mypy-extensions==0.4.3 # via mypy -numpy==1.21.6 +numpy==1.22.0 # via # feast (setup.py) # pandas diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index f9f65633f0..e0fbb8092d 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -384,7 +384,7 @@ 
nbformat==5.4.0 # via great-expectations nodeenv==1.6.0 # via pre-commit -numpy==1.21.6 +numpy==1.22.0 # via # altair # feast (setup.py) diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index f5c15dad5d..da1a7f4345 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -89,7 +89,7 @@ mypy==0.961 # via sqlalchemy mypy-extensions==0.4.3 # via mypy -numpy==1.21.6 +numpy==1.22.0 # via # feast (setup.py) # pandas diff --git a/setup.py b/setup.py index d32b13deed..55a715b53a 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ "Jinja2>=2,<4", "jsonschema", "mmh3", - "numpy<1.22,<2", + "numpy>=1.22,<2", "pandas>=1,<2", "pandavro==1.5.*", "protobuf>=3.10,<3.20", From 8e2a3752b847c5c4753d02e0fe190dd303c9f2c7 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Wed, 29 Jun 2022 20:33:16 -0700 Subject: [PATCH 03/73] chore: Verification workflow in build wheels that ensures the wheels built are not development and the correct versions (#2893) * Test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix workflow Signed-off-by: Kevin Zhang * Fix workflow Signed-off-by: Kevin Zhang * Test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/build_wheels.yml | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_wheels.yml 
b/.github/workflows/build_wheels.yml index 6ee866781d..b578501b59 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -1,7 +1,13 @@ name: build_wheels # Call this workflow from other workflows in the repository by specifying "uses: ./.github/workflows/build_wheels.yml" -on: [workflow_dispatch, workflow_call] +# Developers who are starting a new release should use this workflow to ensure wheels will be built correctly. +# Devs should check out their fork, add a tag to the last master commit on their fork, and run the release off of their fork on the added tag to ensure wheels will be built correctly. +on: + workflow_dispatch: + tags: + - 'v*.*.*' + workflow_call: jobs: get-version: @@ -134,7 +140,7 @@ jobs: verify-python-wheels: runs-on: ${{ matrix.os }} - needs: [build-python-wheel, build-source-distribution] + needs: [build-python-wheel, build-source-distribution, get-version] strategy: matrix: os: [ubuntu-latest, macos-10.15 ] @@ -153,6 +159,7 @@ jobs: else echo "Succeeded!" fi + VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }} steps: - name: Setup Python id: setup-python @@ -190,6 +197,19 @@ jobs: - name: Install OS X dependencies if: matrix.os == 'macos-10.15' run: brew install coreutils + # Validate that the feast version installed is not development and is the correct version of the tag we ran it off of. + - name: Validate Feast Version + run: | + VERSION_REGEX='[0-9]+\.[0-9]+\.[0-9]+' + OUTPUT_REGEX='^Feast SDK Version: "$VERSION_REGEX"$' + VERSION_OUTPUT=$(feast version) + VERSION=$(echo $VERSION_OUTPUT | grep -oE "$VERSION_REGEX") + OUTPUT=$(echo $VERSION_OUTPUT | grep -E "$REGEX") + if [ -n "$OUTPUT" ] && [ "$VERSION" = "$VERSION_WITHOUT_PREFIX" ]; then + echo "Correct Feast Version Installed" + else + echo "$VERSION_OUTPUT from installed wheel is not in the correct format or doesn't have the right version $VERSION." 
+ fi - name: Smoke test run: | feast init test_repo @@ -198,6 +218,7 @@ jobs: echo "$TEST_SCRIPT" > run-and-wait.sh bash run-and-wait.sh feast serve bash run-and-wait.sh feast ui + # We disable this test for the Python 3.10 binary since it does not include Go. - name: Smoke test with go if: matrix.python-version != '3.10' || matrix.os == 'ubuntu-latest' From 86e9efdc893de817a359feb939f06717716c0b17 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Thu, 30 Jun 2022 10:16:30 -0700 Subject: [PATCH 04/73] fix: Fix grpc and update protobuf (#2894) * Test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix workflow Signed-off-by: Kevin Zhang * Fix workflow Signed-off-by: Kevin Zhang * Test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix protobuf pin version and add path variable Signed-off-by: Kevin Zhang * Revert setup Signed-off-by: Kevin Zhang * Update 3.8 Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Update 3.10 Signed-off-by: Kevin Zhang * update Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix test Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Update go with grpc 1.47 Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/build_wheels.yml | 4 +- Makefile | 2 +- go.mod | 2 +- go.sum | 5 +- pyproject.toml | 2 +- sdk/python/feast/proto_json.py | 8 +-- .../requirements/py3.10-ci-requirements.txt | 54 +++++++++--------- .../requirements/py3.10-requirements.txt | 18 +++--- 
.../requirements/py3.8-ci-requirements.txt | 56 +++++++++---------- .../requirements/py3.8-requirements.txt | 20 +++---- .../requirements/py3.9-ci-requirements.txt | 54 +++++++++--------- .../requirements/py3.9-requirements.txt | 18 +++--- sdk/python/tests/unit/test_proto_json.py | 2 +- setup.py | 16 +++--- 14 files changed, 126 insertions(+), 135 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index b578501b59..ea93cc85a4 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -186,7 +186,7 @@ jobs: env: COMPILE_GO: "True" run: | - pip install 'grpcio-tools==1.44.0' 'pybindgen==0.22.0' + pip install 'grpcio-tools==1.47.0' 'pybindgen==0.22.0' go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0 go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1.0 pip install dist/*tar.gz @@ -209,6 +209,7 @@ jobs: echo "Correct Feast Version Installed" else echo "$VERSION_OUTPUT from installed wheel is not in the correct format or doesn't have the right version $VERSION." + exit 1 fi - name: Smoke test run: | @@ -218,7 +219,6 @@ jobs: echo "$TEST_SCRIPT" > run-and-wait.sh bash run-and-wait.sh feast serve bash run-and-wait.sh feast ui - # We disable this test for the Python 3.10 binary since it does not include Go. 
- name: Smoke test with go if: matrix.python-version != '3.10' || matrix.os == 'ubuntu-latest' diff --git a/Makefile b/Makefile index 88f04aa95d..176e2cb354 100644 --- a/Makefile +++ b/Makefile @@ -180,7 +180,7 @@ install-go-ci-dependencies: python -m pip install pybindgen==0.22.0 install-protoc-dependencies: - pip install grpcio-tools==1.44.0 mypy-protobuf==3.1.0 + pip install grpcio-tools==1.47.0 mypy-protobuf==3.1.0 compile-protos-go: install-go-proto-dependencies install-protoc-dependencies python setup.py build_go_protos diff --git a/go.mod b/go.mod index fbbc95e1bf..90ddb93e21 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/pkg/errors v0.9.1 github.com/spaolacci/murmur3 v1.1.0 github.com/stretchr/testify v1.7.0 - google.golang.org/grpc v1.45.0 + google.golang.org/grpc v1.47.0 google.golang.org/protobuf v1.28.0 ) diff --git a/go.sum b/go.sum index 698d1ef640..933ecf6b29 100644 --- a/go.sum +++ b/go.sum @@ -55,6 +55,7 @@ github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= @@ -83,6 +84,7 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane 
v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/feast-dev/gopy v0.4.1-0.20220429180328-4257ac71a4d0 h1:Go714ObVP1O+a6qK7haXVL28QNm6WMD8bwnN9EA8PlM= @@ -588,8 +590,9 @@ google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTp google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.47.0 h1:9n77onPX5F3qfFCqjy9dhn8PbNQsIKeVU04J9G7umt8= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/pyproject.toml b/pyproject.toml index 64394a558a..8ba7254440 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = 
["setuptools>=60", "wheel", "setuptools_scm>=6.2", "grpcio", "grpcio-tools==1.44.0", "mypy-protobuf==3.1", "sphinx!=4.0.0"] +requires = ["setuptools>=60", "wheel", "setuptools_scm>=6.2", "grpcio", "grpcio-tools==1.47.0", "mypy-protobuf==3.1", "sphinx!=4.0.0"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] diff --git a/sdk/python/feast/proto_json.py b/sdk/python/feast/proto_json.py index 44e004cb03..58b77edf8b 100644 --- a/sdk/python/feast/proto_json.py +++ b/sdk/python/feast/proto_json.py @@ -70,7 +70,7 @@ def to_json_object(printer: _Printer, message: ProtoMessage) -> JsonObject: return value def from_json_object( - parser: _Parser, value: JsonObject, message: ProtoMessage, + parser: _Parser, value: JsonObject, message: ProtoMessage, path: str ) -> None: if value is None: message.null_val = 0 @@ -142,11 +142,11 @@ def to_json_object(printer: _Printer, message: ProtoMessage) -> JsonObject: return [printer._MessageToJsonObject(item) for item in message.val] def from_json_object( - parser: _Parser, value: JsonObject, message: ProtoMessage, + parser: _Parser, value: JsonObject, message: ProtoMessage, path: str ) -> None: array = value if isinstance(value, list) else value["val"] for item in array: - parser.ConvertMessage(item, message.val.add()) + parser.ConvertMessage(item, message.val.add(), path) _patch_proto_json_encoding(RepeatedValue, to_json_object, from_json_object) @@ -183,7 +183,7 @@ def to_json_object(printer: _Printer, message: ProtoMessage) -> JsonObject: return list(message.val) def from_json_object( - parser: _Parser, value: JsonObject, message: ProtoMessage, + parser: _Parser, value: JsonObject, message: ProtoMessage, path: str ) -> None: array = value if isinstance(value, list) else value["val"] message.val.extend(array) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index bcd1095d07..e0b05ecaa0 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ 
b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -90,7 +90,9 @@ botocore==1.23.24 bowler==0.9.0 # via feast (setup.py) build==0.8.0 - # via feast (setup.py) + # via + # feast (setup.py) + # pip-tools cachecontrol==0.12.11 # via firebase-admin cachetools==4.2.4 @@ -173,7 +175,7 @@ executing==0.8.3 # via stack-data fastapi==0.78.0 # via feast (setup.py) -fastavro==1.5.1 +fastavro==1.5.2 # via # feast (setup.py) # pandavro @@ -209,7 +211,7 @@ google-api-core[grpc]==1.31.6 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.51.0 +google-api-python-client==2.52.0 # via firebase-admin google-auth==1.35.0 # via @@ -257,8 +259,6 @@ googleapis-common-protos==1.56.3 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -greenlet==1.1.2 - # via sqlalchemy grpcio==1.47.0 # via # feast (setup.py) @@ -269,9 +269,9 @@ grpcio==1.47.0 # grpcio-tools grpcio-reflection==1.47.0 # via feast (setup.py) -grpcio-testing==1.44.0 +grpcio-testing==1.47.0 # via feast (setup.py) -grpcio-tools==1.44.0 +grpcio-tools==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -295,7 +295,7 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==4.11.4 +importlib-metadata==4.12.0 # via great-expectations iniconfig==1.1.1 # via pytest @@ -322,7 +322,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.6.0 +jsonschema==4.6.1 # via # altair # feast (setup.py) @@ -342,7 +342,7 @@ mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==2.0.2 +mistune==2.0.3 # via great-expectations mmh3==3.0.0 # via feast (setup.py) @@ -350,7 +350,7 @@ mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==3.1.14 +moto==3.1.16 # via feast (setup.py) msal==1.18.0 # via @@ -382,7 +382,7 @@ mysqlclient==2.1.1 # via feast (setup.py) nbformat==5.4.0 # via great-expectations -nodeenv==1.6.0 +nodeenv==1.7.0 # via pre-commit numpy==1.22.0 # via @@ -426,14 +426,12 @@ 
pathspec==0.9.0 pbr==5.9.0 # via mock pep517==0.12.0 - # via - # build - # pip-tools + # via build pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.6.2 +pip-tools==6.7.0 # via feast (setup.py) platformdirs==2.5.2 # via virtualenv @@ -445,7 +443,7 @@ portalocker==2.4.0 # via msal-extensions pre-commit==2.19.0 # via feast (setup.py) -prompt-toolkit==3.0.29 +prompt-toolkit==3.0.30 # via ipython proto-plus==1.20.6 # via @@ -454,7 +452,7 @@ proto-plus==1.20.6 # google-cloud-bigquery-storage # google-cloud-datastore # google-cloud-firestore -protobuf==3.19.4 +protobuf==3.20.1 # via # feast (setup.py) # google-api-core @@ -588,7 +586,7 @@ redis==4.2.2 # via feast (setup.py) regex==2022.6.2 # via black -requests==2.28.0 +requests==2.28.1 # via # adal # adlfs @@ -665,7 +663,7 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.38 +sqlalchemy[mypy]==1.4.39 # via feast (setup.py) sqlalchemy2-stubs==0.0.2a24 # via sqlalchemy @@ -724,17 +722,17 @@ types-protobuf==3.19.22 # mypy-protobuf types-python-dateutil==2.8.18 # via feast (setup.py) -types-pytz==2022.1.0 +types-pytz==2022.1.1 # via feast (setup.py) -types-pyyaml==6.0.8 +types-pyyaml==6.0.9 # via feast (setup.py) -types-redis==4.3.2 +types-redis==4.3.3 # via feast (setup.py) -types-requests==2.27.31 +types-requests==2.28.0 # via feast (setup.py) -types-setuptools==57.4.17 +types-setuptools==57.4.18 # via feast (setup.py) -types-tabulate==0.8.10 +types-tabulate==0.8.11 # via feast (setup.py) types-urllib3==1.26.15 # via types-requests @@ -759,11 +757,11 @@ urllib3==1.26.9 # minio # requests # responses -uvicorn[standard]==0.18.1 +uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.14.1 +virtualenv==20.15.1 # via pre-commit volatile==2.1.0 # via bowler diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index dc60ea1e70..d4a4425aec 
100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -22,7 +22,7 @@ cachetools==5.2.0 # via google-auth certifi==2022.6.15 # via requests -charset-normalizer==2.0.12 +charset-normalizer==2.1.0 # via requests click==8.0.1 # via @@ -40,7 +40,7 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.78.0 # via feast (setup.py) -fastavro==1.5.1 +fastavro==1.5.2 # via # feast (setup.py) # pandavro @@ -50,15 +50,13 @@ fsspec==2022.5.0 # via dask google-api-core==2.8.2 # via feast (setup.py) -google-auth==2.8.0 +google-auth==2.9.0 # via google-api-core googleapis-common-protos==1.56.3 # via # feast (setup.py) # google-api-core # tensorflow-metadata -greenlet==1.1.2 - # via sqlalchemy grpcio==1.47.0 # via # feast (setup.py) @@ -75,7 +73,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.6.0 +jsonschema==4.6.1 # via feast (setup.py) locket==1.0.0 # via partd @@ -107,7 +105,7 @@ partd==1.2.0 # via dask proto-plus==1.20.6 # via feast (setup.py) -protobuf==3.19.4 +protobuf==3.20.1 # via # feast (setup.py) # google-api-core @@ -144,7 +142,7 @@ pyyaml==6.0 # dask # feast (setup.py) # uvicorn -requests==2.28.0 +requests==2.28.1 # via google-api-core rsa==4.8 # via google-auth @@ -156,7 +154,7 @@ six==1.16.0 # python-dateutil sniffio==1.2.0 # via anyio -sqlalchemy[mypy]==1.4.38 +sqlalchemy[mypy]==1.4.39 # via feast (setup.py) sqlalchemy2-stubs==0.0.2a24 # via sqlalchemy @@ -187,7 +185,7 @@ typing-extensions==4.2.0 # sqlalchemy2-stubs urllib3==1.26.9 # via requests -uvicorn[standard]==0.18.1 +uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 2be1e80464..813420a121 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -94,7 +94,9 @@ botocore==1.23.24 bowler==0.9.0 # via feast 
(setup.py) build==0.8.0 - # via feast (setup.py) + # via + # feast (setup.py) + # pip-tools cachecontrol==0.12.11 # via firebase-admin cachetools==4.2.4 @@ -177,7 +179,7 @@ executing==0.8.3 # via stack-data fastapi==0.78.0 # via feast (setup.py) -fastavro==1.5.1 +fastavro==1.5.2 # via # feast (setup.py) # pandavro @@ -213,7 +215,7 @@ google-api-core[grpc]==1.31.6 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.51.0 +google-api-python-client==2.52.0 # via firebase-admin google-auth==1.35.0 # via @@ -261,8 +263,6 @@ googleapis-common-protos==1.56.3 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -greenlet==1.1.2 - # via sqlalchemy grpcio==1.47.0 # via # feast (setup.py) @@ -273,9 +273,9 @@ grpcio==1.47.0 # grpcio-tools grpcio-reflection==1.47.0 # via feast (setup.py) -grpcio-testing==1.44.0 +grpcio-testing==1.47.0 # via feast (setup.py) -grpcio-tools==1.44.0 +grpcio-tools==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -299,7 +299,7 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==4.11.4 +importlib-metadata==4.12.0 # via great-expectations importlib-resources==5.8.0 # via jsonschema @@ -328,7 +328,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.6.0 +jsonschema==4.6.1 # via # altair # feast (setup.py) @@ -348,7 +348,7 @@ mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==2.0.2 +mistune==2.0.3 # via great-expectations mmh3==3.0.0 # via feast (setup.py) @@ -356,7 +356,7 @@ mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==3.1.14 +moto==3.1.16 # via feast (setup.py) msal==1.18.0 # via @@ -388,9 +388,9 @@ mysqlclient==2.1.1 # via feast (setup.py) nbformat==5.4.0 # via great-expectations -nodeenv==1.6.0 +nodeenv==1.7.0 # via pre-commit -numpy==1.22.0 +numpy==1.23.0 # via # altair # feast (setup.py) @@ -432,14 +432,12 @@ pathspec==0.9.0 pbr==5.9.0 # via mock pep517==0.12.0 - # 
via - # build - # pip-tools + # via build pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.6.2 +pip-tools==6.7.0 # via feast (setup.py) platformdirs==2.5.2 # via virtualenv @@ -451,7 +449,7 @@ portalocker==2.4.0 # via msal-extensions pre-commit==2.19.0 # via feast (setup.py) -prompt-toolkit==3.0.29 +prompt-toolkit==3.0.30 # via ipython proto-plus==1.20.6 # via @@ -460,7 +458,7 @@ proto-plus==1.20.6 # google-cloud-bigquery-storage # google-cloud-datastore # google-cloud-firestore -protobuf==3.19.4 +protobuf==3.20.1 # via # feast (setup.py) # google-api-core @@ -594,7 +592,7 @@ redis==4.2.2 # via feast (setup.py) regex==2022.6.2 # via black -requests==2.28.0 +requests==2.28.1 # via # adal # adlfs @@ -673,7 +671,7 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.38 +sqlalchemy[mypy]==1.4.39 # via feast (setup.py) sqlalchemy2-stubs==0.0.2a24 # via sqlalchemy @@ -732,17 +730,17 @@ types-protobuf==3.19.22 # mypy-protobuf types-python-dateutil==2.8.18 # via feast (setup.py) -types-pytz==2022.1.0 +types-pytz==2022.1.1 # via feast (setup.py) -types-pyyaml==6.0.8 +types-pyyaml==6.0.9 # via feast (setup.py) -types-redis==4.3.2 +types-redis==4.3.3 # via feast (setup.py) -types-requests==2.27.31 +types-requests==2.28.0 # via feast (setup.py) -types-setuptools==57.4.17 +types-setuptools==57.4.18 # via feast (setup.py) -types-tabulate==0.8.10 +types-tabulate==0.8.11 # via feast (setup.py) types-urllib3==1.26.15 # via types-requests @@ -769,11 +767,11 @@ urllib3==1.26.9 # minio # requests # responses -uvicorn[standard]==0.18.1 +uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.14.1 +virtualenv==20.15.1 # via pre-commit volatile==2.1.0 # via bowler diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 8ba9f4c35d..56e5e3adba 100644 --- a/sdk/python/requirements/py3.8-requirements.txt 
+++ b/sdk/python/requirements/py3.8-requirements.txt @@ -22,7 +22,7 @@ cachetools==5.2.0 # via google-auth certifi==2022.6.15 # via requests -charset-normalizer==2.0.12 +charset-normalizer==2.1.0 # via requests click==8.0.1 # via @@ -40,7 +40,7 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.78.0 # via feast (setup.py) -fastavro==1.5.1 +fastavro==1.5.2 # via # feast (setup.py) # pandavro @@ -50,15 +50,13 @@ fsspec==2022.5.0 # via dask google-api-core==2.8.2 # via feast (setup.py) -google-auth==2.8.0 +google-auth==2.9.0 # via google-api-core googleapis-common-protos==1.56.3 # via # feast (setup.py) # google-api-core # tensorflow-metadata -greenlet==1.1.2 - # via sqlalchemy grpcio==1.47.0 # via # feast (setup.py) @@ -77,7 +75,7 @@ importlib-resources==5.8.0 # via jsonschema jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.6.0 +jsonschema==4.6.1 # via feast (setup.py) locket==1.0.0 # via partd @@ -91,7 +89,7 @@ mypy==0.961 # via sqlalchemy mypy-extensions==0.4.3 # via mypy -numpy==1.22.0 +numpy==1.23.0 # via # feast (setup.py) # pandas @@ -109,7 +107,7 @@ partd==1.2.0 # via dask proto-plus==1.20.6 # via feast (setup.py) -protobuf==3.19.4 +protobuf==3.20.1 # via # feast (setup.py) # google-api-core @@ -146,7 +144,7 @@ pyyaml==6.0 # dask # feast (setup.py) # uvicorn -requests==2.28.0 +requests==2.28.1 # via google-api-core rsa==4.8 # via google-auth @@ -158,7 +156,7 @@ six==1.16.0 # python-dateutil sniffio==1.2.0 # via anyio -sqlalchemy[mypy]==1.4.38 +sqlalchemy[mypy]==1.4.39 # via feast (setup.py) sqlalchemy2-stubs==0.0.2a24 # via sqlalchemy @@ -190,7 +188,7 @@ typing-extensions==4.2.0 # starlette urllib3==1.26.9 # via requests -uvicorn[standard]==0.18.1 +uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index e0fbb8092d..fe1b8b2e13 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ 
b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -90,7 +90,9 @@ botocore==1.23.24 bowler==0.9.0 # via feast (setup.py) build==0.8.0 - # via feast (setup.py) + # via + # feast (setup.py) + # pip-tools cachecontrol==0.12.11 # via firebase-admin cachetools==4.2.4 @@ -173,7 +175,7 @@ executing==0.8.3 # via stack-data fastapi==0.78.0 # via feast (setup.py) -fastavro==1.5.1 +fastavro==1.5.2 # via # feast (setup.py) # pandavro @@ -209,7 +211,7 @@ google-api-core[grpc]==1.31.6 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.51.0 +google-api-python-client==2.52.0 # via firebase-admin google-auth==1.35.0 # via @@ -257,8 +259,6 @@ googleapis-common-protos==1.56.3 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -greenlet==1.1.2 - # via sqlalchemy grpcio==1.47.0 # via # feast (setup.py) @@ -269,9 +269,9 @@ grpcio==1.47.0 # grpcio-tools grpcio-reflection==1.47.0 # via feast (setup.py) -grpcio-testing==1.44.0 +grpcio-testing==1.47.0 # via feast (setup.py) -grpcio-tools==1.44.0 +grpcio-tools==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -295,7 +295,7 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==4.11.4 +importlib-metadata==4.12.0 # via great-expectations iniconfig==1.1.1 # via pytest @@ -322,7 +322,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.6.0 +jsonschema==4.6.1 # via # altair # feast (setup.py) @@ -342,7 +342,7 @@ mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==2.0.2 +mistune==2.0.3 # via great-expectations mmh3==3.0.0 # via feast (setup.py) @@ -350,7 +350,7 @@ mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==3.1.14 +moto==3.1.16 # via feast (setup.py) msal==1.18.0 # via @@ -382,7 +382,7 @@ mysqlclient==2.1.1 # via feast (setup.py) nbformat==5.4.0 # via great-expectations -nodeenv==1.6.0 +nodeenv==1.7.0 # via pre-commit numpy==1.22.0 # via @@ -426,14 +426,12 @@ 
pathspec==0.9.0 pbr==5.9.0 # via mock pep517==0.12.0 - # via - # build - # pip-tools + # via build pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.6.2 +pip-tools==6.7.0 # via feast (setup.py) platformdirs==2.5.2 # via virtualenv @@ -445,7 +443,7 @@ portalocker==2.4.0 # via msal-extensions pre-commit==2.19.0 # via feast (setup.py) -prompt-toolkit==3.0.29 +prompt-toolkit==3.0.30 # via ipython proto-plus==1.20.6 # via @@ -454,7 +452,7 @@ proto-plus==1.20.6 # google-cloud-bigquery-storage # google-cloud-datastore # google-cloud-firestore -protobuf==3.19.4 +protobuf==3.20.1 # via # feast (setup.py) # google-api-core @@ -588,7 +586,7 @@ redis==4.2.2 # via feast (setup.py) regex==2022.6.2 # via black -requests==2.28.0 +requests==2.28.1 # via # adal # adlfs @@ -667,7 +665,7 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.38 +sqlalchemy[mypy]==1.4.39 # via feast (setup.py) sqlalchemy2-stubs==0.0.2a24 # via sqlalchemy @@ -726,17 +724,17 @@ types-protobuf==3.19.22 # mypy-protobuf types-python-dateutil==2.8.18 # via feast (setup.py) -types-pytz==2022.1.0 +types-pytz==2022.1.1 # via feast (setup.py) -types-pyyaml==6.0.8 +types-pyyaml==6.0.9 # via feast (setup.py) -types-redis==4.3.2 +types-redis==4.3.3 # via feast (setup.py) -types-requests==2.27.31 +types-requests==2.28.0 # via feast (setup.py) -types-setuptools==57.4.17 +types-setuptools==57.4.18 # via feast (setup.py) -types-tabulate==0.8.10 +types-tabulate==0.8.11 # via feast (setup.py) types-urllib3==1.26.15 # via types-requests @@ -763,11 +761,11 @@ urllib3==1.26.9 # minio # requests # responses -uvicorn[standard]==0.18.1 +uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.14.1 +virtualenv==20.15.1 # via pre-commit volatile==2.1.0 # via bowler diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index da1a7f4345..76e2815ed5 
100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -22,7 +22,7 @@ cachetools==5.2.0 # via google-auth certifi==2022.6.15 # via requests -charset-normalizer==2.0.12 +charset-normalizer==2.1.0 # via requests click==8.0.1 # via @@ -40,7 +40,7 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.78.0 # via feast (setup.py) -fastavro==1.5.1 +fastavro==1.5.2 # via # feast (setup.py) # pandavro @@ -50,15 +50,13 @@ fsspec==2022.5.0 # via dask google-api-core==2.8.2 # via feast (setup.py) -google-auth==2.8.0 +google-auth==2.9.0 # via google-api-core googleapis-common-protos==1.56.3 # via # feast (setup.py) # google-api-core # tensorflow-metadata -greenlet==1.1.2 - # via sqlalchemy grpcio==1.47.0 # via # feast (setup.py) @@ -75,7 +73,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.6.0 +jsonschema==4.6.1 # via feast (setup.py) locket==1.0.0 # via partd @@ -107,7 +105,7 @@ partd==1.2.0 # via dask proto-plus==1.20.6 # via feast (setup.py) -protobuf==3.19.4 +protobuf==3.20.1 # via # feast (setup.py) # google-api-core @@ -144,7 +142,7 @@ pyyaml==6.0 # dask # feast (setup.py) # uvicorn -requests==2.28.0 +requests==2.28.1 # via google-api-core rsa==4.8 # via google-auth @@ -156,7 +154,7 @@ six==1.16.0 # python-dateutil sniffio==1.2.0 # via anyio -sqlalchemy[mypy]==1.4.38 +sqlalchemy[mypy]==1.4.39 # via feast (setup.py) sqlalchemy2-stubs==0.0.2a24 # via sqlalchemy @@ -188,7 +186,7 @@ typing-extensions==4.2.0 # starlette urllib3==1.26.9 # via requests -uvicorn[standard]==0.18.1 +uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn diff --git a/sdk/python/tests/unit/test_proto_json.py b/sdk/python/tests/unit/test_proto_json.py index 6bfdbbbf91..235ebc7f93 100644 --- a/sdk/python/tests/unit/test_proto_json.py +++ b/sdk/python/tests/unit/test_proto_json.py @@ -81,7 +81,7 @@ def test_feast_repeated_value(proto_json_patch): # additional structure (e.g. 
[1,2,3] instead of {"val": [1,2,3]}) repeated_value_str = "[1,2,3]" repeated_value_proto = RepeatedValue() - Parse(repeated_value_str, repeated_value_proto) + Parse(repeated_value_str, repeated_value_proto, "") assertpy.assert_that(len(repeated_value_proto.val)).is_equal_to(3) assertpy.assert_that(repeated_value_proto.val[0].int64_val).is_equal_to(1) assertpy.assert_that(repeated_value_proto.val[1].int64_val).is_equal_to(2) diff --git a/setup.py b/setup.py index 55a715b53a..217b2e1011 100644 --- a/setup.py +++ b/setup.py @@ -52,15 +52,15 @@ "fastavro>=1.1.0,<2", "google-api-core>=1.23.0,<3", "googleapis-common-protos>=1.52.*,<2", - "grpcio>=1.34.0,<2", - "grpcio-reflection>=1.34.0,<2", + "grpcio>=1.47.0,<2", + "grpcio-reflection>=1.47.0,<2", "Jinja2>=2,<4", "jsonschema", "mmh3", "numpy>=1.22,<2", "pandas>=1,<2", "pandavro==1.5.*", - "protobuf>=3.10,<3.20", + "protobuf>3.20,<4", "proto-plus==1.20.*", "pyarrow>=4,<7", "pydantic>=1,<2", @@ -131,8 +131,8 @@ "flake8", "black==19.10b0", "isort>=5,<6", - "grpcio-tools==1.44.0", - "grpcio-testing==1.44.0", + "grpcio-tools==1.47.0", + "grpcio-testing==1.47.0", "minio==7.1.0", "mock==2.0.0", "moto", @@ -514,8 +514,8 @@ def copy_extensions_to_source(self): use_scm_version=use_scm_version, setup_requires=[ "setuptools_scm", - "grpcio", - "grpcio-tools==1.44.0", + "grpcio==1.47.0", + "grpcio-tools==1.47.0", "mypy-protobuf==3.1", "pybindgen==0.22.0", "sphinx!=4.0.0", @@ -533,4 +533,4 @@ def copy_extensions_to_source(self): ["github.com/feast-dev/feast/go/embedded"], ) ], -) +) \ No newline at end of file From 056cfa1b21db4ff092b9d1f9c06f7300a4c9f4b7 Mon Sep 17 00:00:00 2001 From: Yongheng Lin Date: Thu, 30 Jun 2022 13:00:27 -0700 Subject: [PATCH 05/73] feat: Support retrieval from multiple feature views with different join keys (#2835) * feat: Support retrieving from multiple feature views Signed-off-by: Yongheng Lin * group by join keys instead of feature view Signed-off-by: Yongheng Lin * tolerate insufficient entities 
Signed-off-by: Yongheng Lin * mock registry.getEntityJoinKey Signed-off-by: Yongheng Lin * add integration test Signed-off-by: Yongheng Lin --- .../java/feast/serving/registry/Registry.java | 15 +++ .../serving/registry/RegistryRepository.java | 4 + .../service/OnlineServingServiceV2.java | 101 +++++++++++++++--- .../feast/serving/it/ServingBaseTests.java | 30 ++++++ .../service/OnlineServingServiceTest.java | 6 ++ 5 files changed, 140 insertions(+), 16 deletions(-) diff --git a/java/serving/src/main/java/feast/serving/registry/Registry.java b/java/serving/src/main/java/feast/serving/registry/Registry.java index bc953174ea..a7b28f7c66 100644 --- a/java/serving/src/main/java/feast/serving/registry/Registry.java +++ b/java/serving/src/main/java/feast/serving/registry/Registry.java @@ -33,6 +33,7 @@ public class Registry { private Map onDemandFeatureViewNameToSpec; private final Map featureServiceNameToSpec; + private final Map entityNameToJoinKey; Registry(RegistryProto.Registry registry) { this.registry = registry; @@ -60,6 +61,12 @@ public class Registry { .collect( Collectors.toMap( FeatureServiceProto.FeatureServiceSpec::getName, Function.identity())); + this.entityNameToJoinKey = + registry.getEntitiesList().stream() + .map(EntityProto.Entity::getSpec) + .collect( + Collectors.toMap( + EntityProto.EntitySpecV2::getName, EntityProto.EntitySpecV2::getJoinKey)); } public RegistryProto.Registry getRegistry() { @@ -115,4 +122,12 @@ public FeatureServiceProto.FeatureServiceSpec getFeatureServiceSpec(String name) } return spec; } + + public String getEntityJoinKey(String name) { + String joinKey = entityNameToJoinKey.get(name); + if (joinKey == null) { + throw new SpecRetrievalException(String.format("Unable to find entity with name: %s", name)); + } + return joinKey; + } } diff --git a/java/serving/src/main/java/feast/serving/registry/RegistryRepository.java b/java/serving/src/main/java/feast/serving/registry/RegistryRepository.java index 369493ee0f..023ec1a062 100644 
--- a/java/serving/src/main/java/feast/serving/registry/RegistryRepository.java +++ b/java/serving/src/main/java/feast/serving/registry/RegistryRepository.java @@ -102,4 +102,8 @@ public Duration getMaxAge(ServingAPIProto.FeatureReferenceV2 featureReference) { public List getEntitiesList(ServingAPIProto.FeatureReferenceV2 featureReference) { return getFeatureViewSpec(featureReference).getEntitiesList(); } + + public String getEntityJoinKey(String name) { + return this.registry.getEntityJoinKey(name); + } } diff --git a/java/serving/src/main/java/feast/serving/service/OnlineServingServiceV2.java b/java/serving/src/main/java/feast/serving/service/OnlineServingServiceV2.java index 12e8a5b158..3751ee8119 100644 --- a/java/serving/src/main/java/feast/serving/service/OnlineServingServiceV2.java +++ b/java/serving/src/main/java/feast/serving/service/OnlineServingServiceV2.java @@ -34,7 +34,6 @@ import feast.serving.registry.RegistryRepository; import feast.serving.util.Metrics; import feast.storage.api.retriever.OnlineRetrieverV2; -import io.grpc.Status; import io.opentracing.Span; import io.opentracing.Tracer; import java.util.*; @@ -51,6 +50,11 @@ public class OnlineServingServiceV2 implements ServingServiceV2 { private final OnlineTransformationService onlineTransformationService; private final String project; + public static final String DUMMY_ENTITY_ID = "__dummy_id"; + public static final String DUMMY_ENTITY_VAL = ""; + public static final ValueProto.Value DUMMY_ENTITY_VALUE = + ValueProto.Value.newBuilder().setStringVal(DUMMY_ENTITY_VAL).build(); + public OnlineServingServiceV2( OnlineRetrieverV2 retriever, Tracer tracer, @@ -103,31 +107,18 @@ public ServingAPIProto.GetOnlineFeaturesResponse getOnlineFeatures( List> entityRows = getEntityRows(request); - List entityNames; - if (retrievedFeatureReferences.size() > 0) { - entityNames = this.registryRepository.getEntitiesList(retrievedFeatureReferences.get(0)); - } else { - throw new RuntimeException("Requested 
features list must not be empty"); - } - Span storageRetrievalSpan = tracer.buildSpan("storageRetrieval").start(); if (storageRetrievalSpan != null) { storageRetrievalSpan.setTag("entities", entityRows.size()); storageRetrievalSpan.setTag("features", retrievedFeatureReferences.size()); } + List> features = - retriever.getOnlineFeatures(entityRows, retrievedFeatureReferences, entityNames); + retrieveFeatures(retrievedFeatureReferences, entityRows); if (storageRetrievalSpan != null) { storageRetrievalSpan.finish(); } - if (features.size() != entityRows.size()) { - throw Status.INTERNAL - .withDescription( - "The no. of FeatureRow obtained from OnlineRetriever" - + "does not match no. of entityRow passed.") - .asRuntimeException(); - } Span postProcessingSpan = tracer.buildSpan("postProcessing").start(); @@ -255,6 +246,84 @@ private List> getEntityRows( return entityRows; } + private List> retrieveFeatures( + List featureReferences, List> entityRows) { + // Prepare feature reference to index mapping. This mapping will be used to arrange the + // retrieved features to the same order as in the input. + if (featureReferences.isEmpty()) { + throw new RuntimeException("Requested features list must not be empty."); + } + Map featureReferenceToIndexMap = + new HashMap<>(featureReferences.size()); + for (int i = 0; i < featureReferences.size(); i++) { + FeatureReferenceV2 featureReference = featureReferences.get(i); + if (featureReferenceToIndexMap.containsKey(featureReference)) { + throw new RuntimeException( + String.format( + "Found duplicate features %s:%s.", + featureReference.getFeatureViewName(), featureReference.getFeatureName())); + } + featureReferenceToIndexMap.put(featureReference, i); + } + + // Create placeholders for retrieved features. 
+ List> features = new ArrayList<>(entityRows.size()); + for (int i = 0; i < entityRows.size(); i++) { + List featuresPerEntity = + new ArrayList<>(featureReferences.size()); + for (int j = 0; j < featureReferences.size(); j++) { + featuresPerEntity.add(null); + } + features.add(featuresPerEntity); + } + + // Group feature references by join keys. + Map> groupNameToFeatureReferencesMap = + featureReferences.stream() + .collect( + Collectors.groupingBy( + featureReference -> + this.registryRepository.getEntitiesList(featureReference).stream() + .map(this.registryRepository::getEntityJoinKey) + .sorted() + .collect(Collectors.joining(",")))); + + // Retrieve features one group at a time. + for (List featureReferencesPerGroup : + groupNameToFeatureReferencesMap.values()) { + List entityNames = + this.registryRepository.getEntitiesList(featureReferencesPerGroup.get(0)); + List> entityRowsPerGroup = new ArrayList<>(entityRows.size()); + for (Map entityRow : entityRows) { + Map entityRowPerGroup = new HashMap<>(); + entityNames.stream() + .map(this.registryRepository::getEntityJoinKey) + .forEach( + joinKey -> { + if (joinKey.equals(DUMMY_ENTITY_ID)) { + entityRowPerGroup.put(joinKey, DUMMY_ENTITY_VALUE); + } else { + ValueProto.Value value = entityRow.get(joinKey); + if (value != null) { + entityRowPerGroup.put(joinKey, value); + } + } + }); + entityRowsPerGroup.add(entityRowPerGroup); + } + List> featuresPerGroup = + retriever.getOnlineFeatures(entityRowsPerGroup, featureReferencesPerGroup, entityNames); + for (int i = 0; i < featuresPerGroup.size(); i++) { + for (int j = 0; j < featureReferencesPerGroup.size(); j++) { + int k = featureReferenceToIndexMap.get(featureReferencesPerGroup.get(j)); + features.get(i).set(k, featuresPerGroup.get(i).get(j)); + } + } + } + + return features; + } + private void populateOnDemandFeatures( List onDemandFeatureReferences, List onDemandFeatureSources, diff --git a/java/serving/src/test/java/feast/serving/it/ServingBaseTests.java 
b/java/serving/src/test/java/feast/serving/it/ServingBaseTests.java index 30cba0cb06..66987e8c0d 100644 --- a/java/serving/src/test/java/feast/serving/it/ServingBaseTests.java +++ b/java/serving/src/test/java/feast/serving/it/ServingBaseTests.java @@ -172,5 +172,35 @@ public void shouldGetOnlineFeaturesWithStringEntity() { } } + @Test + public void shouldGetOnlineFeaturesFromAllFeatureViews() { + Map entityRows = + ImmutableMap.of( + "entity", + ValueProto.RepeatedValue.newBuilder() + .addVal(DataGenerator.createStrValue("key-1")) + .build(), + "driver_id", + ValueProto.RepeatedValue.newBuilder() + .addVal(DataGenerator.createInt64Value(1005)) + .build()); + + ImmutableList featureReferences = + ImmutableList.of( + "feature_view_0:feature_0", + "feature_view_0:feature_1", + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:avg_daily_trips"); + + ServingAPIProto.GetOnlineFeaturesRequest req = + TestUtils.createOnlineFeatureRequest(featureReferences, entityRows); + + ServingAPIProto.GetOnlineFeaturesResponse resp = servingStub.getOnlineFeatures(req); + + for (final int featureIdx : List.of(0, 1, 2, 3)) { + assertEquals(FieldStatus.PRESENT, resp.getResults(featureIdx).getStatuses(0)); + } + } + abstract void updateRegistryFile(RegistryProto.Registry registry); } diff --git a/java/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java b/java/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java index 64d2e20c9b..933e38f056 100644 --- a/java/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java +++ b/java/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java @@ -170,6 +170,8 @@ public void shouldReturnResponseWithValuesAndMetadataIfKeysPresent() { .thenReturn(featureSpecs.get(0)); when(registry.getFeatureSpec(mockedFeatureRows.get(3).getFeatureReference())) .thenReturn(featureSpecs.get(1)); + when(registry.getEntityJoinKey("entity1")).thenReturn("entity1"); + 
when(registry.getEntityJoinKey("entity2")).thenReturn("entity2"); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -237,6 +239,8 @@ public void shouldReturnResponseWithUnsetValuesAndMetadataIfKeysNotPresent() { .thenReturn(featureSpecs.get(0)); when(registry.getFeatureSpec(mockedFeatureRows.get(1).getFeatureReference())) .thenReturn(featureSpecs.get(1)); + when(registry.getEntityJoinKey("entity1")).thenReturn("entity1"); + when(registry.getEntityJoinKey("entity2")).thenReturn("entity2"); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -314,6 +318,8 @@ public void shouldReturnResponseWithValuesAndMetadataIfMaxAgeIsExceeded() { .thenReturn(featureSpecs.get(1)); when(registry.getFeatureSpec(mockedFeatureRows.get(5).getFeatureReference())) .thenReturn(featureSpecs.get(0)); + when(registry.getEntityJoinKey("entity1")).thenReturn("entity1"); + when(registry.getEntityJoinKey("entity2")).thenReturn("entity2"); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); From 51df8be5d3b9bc702393d00e9a6370c703510358 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Thu, 30 Jun 2022 13:38:28 -0700 Subject: [PATCH 06/73] fix: Bump version of Guava to mitigate cve (#2896) * fix: Bump version of Guava to mitigate cve Signed-off-by: Achal Shah * 30.1 Signed-off-by: Achal Shah --- java/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/java/pom.xml b/java/pom.xml index 7ea4bc07bd..475e87ff13 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -70,7 +70,7 @@ 2.0.1.Final 0.21.0 1.6.6 - 29.0-jre + 30.1-jre Date: Thu, 30 Jun 2022 14:02:27 -0700 Subject: [PATCH 07/73] feat: Add column reordering to `write_to_offline_store` (#2876) * Add feature extraction logic to batch writer Signed-off-by: Felix Wang * Enable StreamProcessor to write to both online and offline stores Signed-off-by: Felix Wang * Fix incorrect columns error message Signed-off-by: 
Felix Wang * Reorder columns in _write_to_offline_store Signed-off-by: Felix Wang * Make _write_to_offline_store a public method Signed-off-by: Felix Wang * Import FeatureStore correctly Signed-off-by: Felix Wang * Remove defaults for `processing_time` and `query_timeout` Signed-off-by: Felix Wang * Clean up `test_offline_write.py` Signed-off-by: Felix Wang * Do not do any custom logic for double underscore columns Signed-off-by: Felix Wang * Lint Signed-off-by: Felix Wang * Switch entity values for all tests using push sources to not affect other tests Signed-off-by: Felix Wang --- sdk/python/feast/feature_store.py | 26 ++++- .../infra/contrib/spark_kafka_processor.py | 57 +++++++---- .../feast/infra/contrib/stream_processor.py | 38 +++++--- .../feast/infra/offline_stores/bigquery.py | 4 +- sdk/python/feast/infra/offline_stores/file.py | 4 +- .../feast/infra/offline_stores/redshift.py | 4 +- .../feast/infra/offline_stores/snowflake.py | 4 +- .../e2e/test_python_feature_server.py | 11 ++- .../offline_store/test_offline_write.py | 95 +++++++------------ .../test_push_offline_retrieval.py | 8 +- .../test_push_online_retrieval.py | 11 ++- 11 files changed, 153 insertions(+), 109 deletions(-) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index de52b9e3f3..78431e2d61 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -1383,7 +1383,7 @@ def push( fv.name, df, allow_registry_cache=allow_registry_cache ) if to == PushMode.OFFLINE or to == PushMode.ONLINE_AND_OFFLINE: - self._write_to_offline_store( + self.write_to_offline_store( fv.name, df, allow_registry_cache=allow_registry_cache ) @@ -1415,14 +1415,18 @@ def write_to_online_store( provider.ingest_df(feature_view, entities, df) @log_exceptions_and_usage - def _write_to_offline_store( + def write_to_offline_store( self, feature_view_name: str, df: pd.DataFrame, allow_registry_cache: bool = True, + reorder_columns: bool = True, ): """ - ingests 
data directly into the Online store + Persists the dataframe directly into the batch data source for the given feature view. + + Fails if the dataframe columns do not match the columns of the batch data source. Optionally + reorders the columns of the dataframe to match. """ # TODO: restrict this to work with online StreamFeatureViews and validate the FeatureView type try: @@ -1433,7 +1437,21 @@ def _write_to_offline_store( feature_view = self.get_feature_view( feature_view_name, allow_registry_cache=allow_registry_cache ) - df.reset_index(drop=True) + + # Get columns of the batch source and the input dataframe. + column_names_and_types = feature_view.batch_source.get_table_column_names_and_types( + self.config + ) + source_columns = [column for column, _ in column_names_and_types] + input_columns = df.columns.values.tolist() + + if set(input_columns) != set(source_columns): + raise ValueError( + f"The input dataframe has columns {set(input_columns)} but the batch source has columns {set(source_columns)}." 
+ ) + + if reorder_columns: + df = df.reindex(columns=source_columns) table = pa.Table.from_pandas(df) provider = self._get_provider() diff --git a/sdk/python/feast/infra/contrib/spark_kafka_processor.py b/sdk/python/feast/infra/contrib/spark_kafka_processor.py index 4dfb615773..32d91b2010 100644 --- a/sdk/python/feast/infra/contrib/spark_kafka_processor.py +++ b/sdk/python/feast/infra/contrib/spark_kafka_processor.py @@ -1,12 +1,14 @@ from types import MethodType -from typing import List +from typing import List, Optional +import pandas as pd from pyspark.sql import DataFrame, SparkSession from pyspark.sql.avro.functions import from_avro from pyspark.sql.functions import col, from_json from feast.data_format import AvroFormat, JsonFormat -from feast.data_source import KafkaSource +from feast.data_source import KafkaSource, PushMode +from feast.feature_store import FeatureStore from feast.infra.contrib.stream_processor import ( ProcessorConfig, StreamProcessor, @@ -24,16 +26,16 @@ class SparkProcessorConfig(ProcessorConfig): class SparkKafkaProcessor(StreamProcessor): spark: SparkSession format: str - write_function: MethodType + preprocess_fn: Optional[MethodType] join_keys: List[str] def __init__( self, + *, + fs: FeatureStore, sfv: StreamFeatureView, config: ProcessorConfig, - write_function: MethodType, - processing_time: str = "30 seconds", - query_timeout: int = 15, + preprocess_fn: Optional[MethodType] = None, ): if not isinstance(sfv.stream_source, KafkaSource): raise ValueError("data source is not kafka source") @@ -55,15 +57,16 @@ def __init__( if not isinstance(config, SparkProcessorConfig): raise ValueError("config is not spark processor config") self.spark = config.spark_session - self.write_function = write_function - self.processing_time = processing_time - self.query_timeout = query_timeout - super().__init__(sfv=sfv, data_source=sfv.stream_source) + self.preprocess_fn = preprocess_fn + self.processing_time = config.processing_time + 
self.query_timeout = config.query_timeout + self.join_keys = [fs.get_entity(entity).join_key for entity in sfv.entities] + super().__init__(fs=fs, sfv=sfv, data_source=sfv.stream_source) - def ingest_stream_feature_view(self) -> None: + def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: ingested_stream_df = self._ingest_stream_data() transformed_df = self._construct_transformation_plan(ingested_stream_df) - online_store_query = self._write_to_online_store(transformed_df) + online_store_query = self._write_stream_data(transformed_df, to) return online_store_query def _ingest_stream_data(self) -> StreamTable: @@ -119,13 +122,35 @@ def _ingest_stream_data(self) -> StreamTable: def _construct_transformation_plan(self, df: StreamTable) -> StreamTable: return self.sfv.udf.__call__(df) if self.sfv.udf else df - def _write_to_online_store(self, df: StreamTable): + def _write_stream_data(self, df: StreamTable, to: PushMode): # Validation occurs at the fs.write_to_online_store() phase against the stream feature view schema. def batch_write(row: DataFrame, batch_id: int): - pd_row = row.toPandas() - self.write_function( - pd_row, input_timestamp="event_timestamp", output_timestamp="" + rows: pd.DataFrame = row.toPandas() + + # Extract the latest feature values for each unique entity row (i.e. the join keys). + # Also add a 'created' column. + rows = ( + rows.sort_values( + by=self.join_keys + [self.sfv.timestamp_field], ascending=True + ) + .groupby(self.join_keys) + .nth(0) ) + rows["created"] = pd.to_datetime("now", utc=True) + + # Reset indices to ensure the dataframe has all the required columns. + rows = rows.reset_index() + + # Optionally execute preprocessor before writing to the online store. + if self.preprocess_fn: + rows = self.preprocess_fn(rows) + + # Finally persist the data to the online store and/or offline store. 
+ if rows.size > 0: + if to == PushMode.ONLINE or to == PushMode.ONLINE_AND_OFFLINE: + self.fs.write_to_online_store(self.sfv.name, rows) + if to == PushMode.OFFLINE or to == PushMode.ONLINE_AND_OFFLINE: + self.fs.write_to_offline_store(self.sfv.name, rows) query = ( df.writeStream.outputMode("update") diff --git a/sdk/python/feast/infra/contrib/stream_processor.py b/sdk/python/feast/infra/contrib/stream_processor.py index 2ccf1e59f8..24817c82ea 100644 --- a/sdk/python/feast/infra/contrib/stream_processor.py +++ b/sdk/python/feast/infra/contrib/stream_processor.py @@ -1,14 +1,17 @@ from abc import ABC -from typing import Callable +from types import MethodType +from typing import TYPE_CHECKING, Optional -import pandas as pd from pyspark.sql import DataFrame -from feast.data_source import DataSource +from feast.data_source import DataSource, PushMode from feast.importer import import_class from feast.repo_config import FeastConfigBaseModel from feast.stream_feature_view import StreamFeatureView +if TYPE_CHECKING: + from feast.feature_store import FeatureStore + STREAM_PROCESSOR_CLASS_FOR_TYPE = { ("spark", "kafka"): "feast.infra.contrib.spark_kafka_processor.SparkKafkaProcessor", } @@ -30,21 +33,26 @@ class StreamProcessor(ABC): and persist that data to the online store. Attributes: + fs: The feature store where data should be persisted. sfv: The stream feature view on which the stream processor operates. data_source: The stream data source from which data will be ingested. 
""" + fs: "FeatureStore" sfv: StreamFeatureView data_source: DataSource - def __init__(self, sfv: StreamFeatureView, data_source: DataSource): + def __init__( + self, fs: "FeatureStore", sfv: StreamFeatureView, data_source: DataSource + ): + self.fs = fs self.sfv = sfv self.data_source = data_source - def ingest_stream_feature_view(self) -> None: + def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: """ Ingests data from the stream source attached to the stream feature view; transforms the data - and then persists it to the online store. + and then persists it to the online store and/or offline store, depending on the 'to' parameter. """ pass @@ -62,26 +70,32 @@ def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: """ pass - def _write_to_online_store(self, table: StreamTable) -> None: + def _write_stream_data(self, table: StreamTable, to: PushMode) -> None: """ - Returns query for persisting data to the online store. + Launches a job to persist stream data to the online store and/or offline store, depending + on the 'to' parameter, and returns a handle for the job. """ pass def get_stream_processor_object( config: ProcessorConfig, + fs: "FeatureStore", sfv: StreamFeatureView, - write_function: Callable[[pd.DataFrame, str, str], None], + preprocess_fn: Optional[MethodType] = None, ): """ - Returns a stream processor object based on the config mode and stream source type. The write function is a - function that wraps the feature store "write_to_online_store" capability. + Returns a stream processor object based on the config. + + The returned object will be capable of launching an ingestion job that reads data from the + given stream feature view's stream source, transforms it if the stream feature view has a + transformation, and then writes it to the online store. It will also preprocess the data + if a preprocessor method is defined. 
""" if config.mode == "spark" and config.source == "kafka": stream_processor = STREAM_PROCESSOR_CLASS_FOR_TYPE[("spark", "kafka")] module_name, class_name = stream_processor.rsplit(".", 1) cls = import_class(module_name, class_name, "StreamProcessor") - return cls(sfv=sfv, config=config, write_function=write_function,) + return cls(fs=fs, sfv=sfv, config=config, preprocess_fn=preprocess_fn) else: raise ValueError("other processors besides spark-kafka not supported") diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 259a3af7d9..cb5b3a045a 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -329,8 +329,8 @@ def offline_write_batch( ) if column_names != table.column_names: raise ValueError( - f"The input pyarrow table has schema {pa_schema} with the incorrect columns {column_names}. " - f"The columns are expected to be (in this order): {column_names}." + f"The input pyarrow table has schema {table.schema} with the incorrect columns {table.column_names}. " + f"The schema is expected to be {pa_schema} with the columns (in this exact order) to be {column_names}." ) if table.schema != pa_schema: diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index 75968146de..10012c2d80 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -430,8 +430,8 @@ def offline_write_batch( ) if column_names != table.column_names: raise ValueError( - f"The input pyarrow table has schema {pa_schema} with the incorrect columns {column_names}. " - f"The columns are expected to be (in this order): {column_names}." + f"The input pyarrow table has schema {table.schema} with the incorrect columns {table.column_names}. " + f"The schema is expected to be {pa_schema} with the columns (in this exact order) to be {column_names}." 
) file_options = feature_view.batch_source.file_options diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 8667989268..5f071a814f 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -323,8 +323,8 @@ def offline_write_batch( ) if column_names != table.column_names: raise ValueError( - f"The input pyarrow table has schema {pa_schema} with the incorrect columns {column_names}. " - f"The columns are expected to be (in this order): {column_names}." + f"The input pyarrow table has schema {table.schema} with the incorrect columns {table.column_names}. " + f"The schema is expected to be {pa_schema} with the columns (in this exact order) to be {column_names}." ) if table.schema != pa_schema: diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index ec06d8dce1..a5befc33e2 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -332,8 +332,8 @@ def offline_write_batch( ) if column_names != table.column_names: raise ValueError( - f"The input pyarrow table has schema {pa_schema} with the incorrect columns {column_names}. " - f"The columns are expected to be (in this order): {column_names}." + f"The input pyarrow table has schema {table.schema} with the incorrect columns {table.column_names}. " + f"The schema is expected to be {pa_schema} with the columns (in this exact order) to be {column_names}." 
) if table.schema != pa_schema: diff --git a/sdk/python/tests/integration/e2e/test_python_feature_server.py b/sdk/python/tests/integration/e2e/test_python_feature_server.py index ea4c35a1ca..7195594d02 100644 --- a/sdk/python/tests/integration/e2e/test_python_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_python_feature_server.py @@ -63,13 +63,16 @@ def test_get_online_features(python_fs_client): @pytest.mark.integration @pytest.mark.universal_online_stores def test_push(python_fs_client): - initial_temp = get_temperatures(python_fs_client, location_ids=[1])[0] + # TODO(felixwang9817): Note that we choose an entity value of 102 here since it is not included + # in the existing range of entity values (1-49). This allows us to push data for this test + # without affecting other tests. This decision is tech debt, and should be resolved by finding a + # better way to isolate data sources across tests. json_data = json.dumps( { "push_source_name": "location_stats_push_source", "df": { - "location_id": [1], - "temperature": [initial_temp * 100], + "location_id": [102], + "temperature": [4], "event_timestamp": [str(datetime.utcnow())], "created": [str(datetime.utcnow())], }, @@ -79,7 +82,7 @@ def test_push(python_fs_client): # Check new pushed temperature is fetched assert response.status_code == 200 - assert get_temperatures(python_fs_client, location_ids=[1]) == [initial_temp * 100] + assert get_temperatures(python_fs_client, location_ids=[102]) == [4] def get_temperatures(client, location_ids: List[int]): diff --git a/sdk/python/tests/integration/offline_store/test_offline_write.py b/sdk/python/tests/integration/offline_store/test_offline_write.py index 30ead98389..3335da0df7 100644 --- a/sdk/python/tests/integration/offline_store/test_offline_write.py +++ b/sdk/python/tests/integration/offline_store/test_offline_write.py @@ -9,69 +9,25 @@ from feast.types import Float32, Int32 from tests.integration.feature_repos.universal.entities import driver - 
-@pytest.mark.integration -@pytest.mark.universal_offline_stores(only=["file", "redshift"]) -@pytest.mark.universal_online_stores(only=["sqlite"]) -def test_writing_columns_in_incorrect_order_fails(environment, universal_data_sources): - # TODO(kevjumba) handle incorrect order later, for now schema must be in the order that the filesource is in - store = environment.feature_store - _, _, data_sources = universal_data_sources - driver_stats = FeatureView( - name="driver_stats", - entities=["driver"], - schema=[ - Field(name="avg_daily_trips", dtype=Int32), - Field(name="conv_rate", dtype=Float32), - ], - source=data_sources.driver, - ) - - now = datetime.utcnow() - ts = pd.Timestamp(now).round("ms") - - entity_df = pd.DataFrame.from_dict( - {"driver_id": [1001, 1002], "event_timestamp": [ts - timedelta(hours=3), ts]} - ) - - store.apply([driver(), driver_stats]) - df = store.get_historical_features( - entity_df=entity_df, - features=["driver_stats:conv_rate", "driver_stats:avg_daily_trips"], - full_feature_names=False, - ).to_df() - - assert df["conv_rate"].isnull().all() - assert df["avg_daily_trips"].isnull().all() - - expected_df = pd.DataFrame.from_dict( - { - "driver_id": [1001, 1002], - "event_timestamp": [ts - timedelta(hours=3), ts], - "conv_rate": [random.random(), random.random()], - "avg_daily_trips": [random.randint(0, 10), random.randint(0, 10)], - "created": [ts, ts], - }, - ) - with pytest.raises(ValueError): - store._write_to_offline_store( - driver_stats.name, expected_df, allow_registry_cache=False - ) +# TODO(felixwang9817): Add a unit test that checks that write_to_offline_store can reorder columns. +# This should only happen after https://github.com/feast-dev/feast/issues/2797 is fixed. 
@pytest.mark.integration -@pytest.mark.universal_offline_stores(only=["file", "redshift"]) +@pytest.mark.universal_offline_stores @pytest.mark.universal_online_stores(only=["sqlite"]) def test_writing_incorrect_schema_fails(environment, universal_data_sources): - # TODO(kevjumba) handle incorrect order later, for now schema must be in the order that the filesource is in + """Tests that writing a dataframe with an incorrect schema fails.""" store = environment.feature_store _, _, data_sources = universal_data_sources + driver_entity = driver() driver_stats = FeatureView( name="driver_stats", - entities=["driver"], + entities=[driver_entity], schema=[ Field(name="avg_daily_trips", dtype=Int32), Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), ], source=data_sources.driver, ) @@ -83,14 +39,19 @@ def test_writing_incorrect_schema_fails(environment, universal_data_sources): {"driver_id": [1001, 1002], "event_timestamp": [ts - timedelta(hours=3), ts]} ) - store.apply([driver(), driver_stats]) + store.apply([driver_entity, driver_stats]) df = store.get_historical_features( entity_df=entity_df, - features=["driver_stats:conv_rate", "driver_stats:avg_daily_trips"], + features=[ + "driver_stats:conv_rate", + "driver_stats:acc_rate", + "driver_stats:avg_daily_trips", + ], full_feature_names=False, ).to_df() assert df["conv_rate"].isnull().all() + assert df["acc_rate"].isnull().all() assert df["avg_daily_trips"].isnull().all() expected_df = pd.DataFrame.from_dict( @@ -103,7 +64,7 @@ def test_writing_incorrect_schema_fails(environment, universal_data_sources): }, ) with pytest.raises(ValueError): - store._write_to_offline_store( + store.write_to_offline_store( driver_stats.name, expected_df, allow_registry_cache=False ) @@ -114,9 +75,10 @@ def test_writing_incorrect_schema_fails(environment, universal_data_sources): def test_writing_consecutively_to_offline_store(environment, universal_data_sources): store = environment.feature_store _, _, 
data_sources = universal_data_sources + driver_entity = driver() driver_stats = FeatureView( name="driver_stats", - entities=["driver"], + entities=[driver_entity], schema=[ Field(name="avg_daily_trips", dtype=Int32), Field(name="conv_rate", dtype=Float32), @@ -138,14 +100,19 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour } ) - store.apply([driver(), driver_stats]) + store.apply([driver_entity, driver_stats]) df = store.get_historical_features( entity_df=entity_df, - features=["driver_stats:conv_rate", "driver_stats:avg_daily_trips"], + features=[ + "driver_stats:conv_rate", + "driver_stats:acc_rate", + "driver_stats:avg_daily_trips", + ], full_feature_names=False, ).to_df() assert df["conv_rate"].isnull().all() + assert df["acc_rate"].isnull().all() assert df["avg_daily_trips"].isnull().all() first_df = pd.DataFrame.from_dict( @@ -158,13 +125,17 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour "created": [ts, ts], }, ) - store._write_to_offline_store( + store.write_to_offline_store( driver_stats.name, first_df, allow_registry_cache=False ) after_write_df = store.get_historical_features( entity_df=entity_df, - features=["driver_stats:conv_rate", "driver_stats:avg_daily_trips"], + features=[ + "driver_stats:conv_rate", + "driver_stats:acc_rate", + "driver_stats:avg_daily_trips", + ], full_feature_names=False, ).to_df() @@ -173,6 +144,10 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour after_write_df["conv_rate"].reset_index(drop=True) == first_df["conv_rate"].reset_index(drop=True) ) + assert np.where( + after_write_df["acc_rate"].reset_index(drop=True) + == first_df["acc_rate"].reset_index(drop=True) + ) assert np.where( after_write_df["avg_daily_trips"].reset_index(drop=True) == first_df["avg_daily_trips"].reset_index(drop=True) @@ -189,7 +164,7 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour }, ) - 
store._write_to_offline_store( + store.write_to_offline_store( driver_stats.name, second_df, allow_registry_cache=False ) diff --git a/sdk/python/tests/integration/offline_store/test_push_offline_retrieval.py b/sdk/python/tests/integration/offline_store/test_push_offline_retrieval.py index 5cea8a36ef..23bb0f98a7 100644 --- a/sdk/python/tests/integration/offline_store/test_push_offline_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_push_offline_retrieval.py @@ -26,7 +26,7 @@ def test_push_features_and_read_from_offline_store(environment, universal_data_s now = pd.Timestamp(datetime.datetime.utcnow()).round("ms") store.apply([driver(), customer(), location(), *feature_views.values()]) - entity_df = pd.DataFrame.from_dict({"location_id": [1], "event_timestamp": [now]}) + entity_df = pd.DataFrame.from_dict({"location_id": [100], "event_timestamp": [now]}) before_df = store.get_historical_features( entity_df=entity_df, @@ -34,9 +34,13 @@ def test_push_features_and_read_from_offline_store(environment, universal_data_s full_feature_names=False, ).to_df() + # TODO(felixwang9817): Note that we choose an entity value of 100 here since it is not included + # in the existing range of entity values (1-49). This allows us to push data for this test + # without affecting other tests. This decision is tech debt, and should be resolved by finding a + # better way to isolate data sources across tests. 
data = { "event_timestamp": [now], - "location_id": [1], + "location_id": [100], "temperature": [4], "created": [now], } diff --git a/sdk/python/tests/integration/online_store/test_push_online_retrieval.py b/sdk/python/tests/integration/online_store/test_push_online_retrieval.py index aa7e3e7f53..436f87715f 100644 --- a/sdk/python/tests/integration/online_store/test_push_online_retrieval.py +++ b/sdk/python/tests/integration/online_store/test_push_online_retrieval.py @@ -22,8 +22,13 @@ def test_push_features_and_read(environment, universal_data_sources): feature_views = construct_universal_feature_views(data_sources) store.apply([driver(), customer(), location(), *feature_views.values()]) + + # TODO(felixwang9817): Note that we choose an entity value of 101 here since it is not included + # in the existing range of entity values (1-49). This allows us to push data for this test + # without affecting other tests. This decision is tech debt, and should be resolved by finding a + # better way to isolate data sources across tests. 
data = { - "location_id": [1], + "location_id": [101], "temperature": [4], "event_timestamp": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")], "created": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")], @@ -34,8 +39,8 @@ def test_push_features_and_read(environment, universal_data_sources): online_resp = store.get_online_features( features=["pushable_location_stats:temperature"], - entity_rows=[{"location_id": 1}], + entity_rows=[{"location_id": 101}], ) online_resp_dict = online_resp.to_dict() - assert online_resp_dict["location_id"] == [1] + assert online_resp_dict["location_id"] == [101] assert online_resp_dict["temperature"] == [4] From 9b97fca876d9520d6e1f9025562036330cc0aabd Mon Sep 17 00:00:00 2001 From: kindalime Date: Fri, 1 Jul 2022 12:18:29 -0700 Subject: [PATCH 08/73] feat: Add pages for individual Features to the Feast UI (#2850) * Add feature page functionality Signed-off-by: Daniel Kim * Add links in feature view and feature pages Signed-off-by: Daniel Kim * Modify Feast provider test to include new Feature pages Signed-off-by: Daniel Kim * Add initial version of test Signed-off-by: Daniel Kim * Make some changes to test and remove feature tab functionality from ondemand FVs Signed-off-by: Daniel Kim * Change feature link EuiLinks to EuiCustomLinks Signed-off-by: Daniel Kim * Change other links to EuiCustomLinks Signed-off-by: Daniel Kim --- ui/src/FeastUISansProviders.test.tsx | 43 ++++++++++ ui/src/FeastUISansProviders.tsx | 9 +- ui/src/components/FeaturesListDisplay.tsx | 21 ++++- ui/src/components/TagSearch.tsx | 2 +- ui/src/custom-tabs/TabsRegistryContext.tsx | 31 ++++++- .../feature-demo-tab/DemoCustomTab.tsx | 83 +++++++++++++++++++ .../feature-demo-tab/useDemoQuery.tsx | 44 ++++++++++ ui/src/custom-tabs/types.ts | 20 ++++- ui/src/graphics/FeatureIcon.tsx | 52 ++++++++++++ ui/src/index.tsx | 8 ++ .../OnDemandFeatureViewOverviewTab.tsx | 6 +- .../RegularFeatureViewOverviewTab.tsx | 4 +- ui/src/pages/features/FeatureInstance.tsx | 
62 ++++++++++++++ ui/src/pages/features/FeatureOverviewTab.tsx | 71 ++++++++++++++++ ui/src/pages/features/FeatureRawData.tsx | 25 ++++++ ui/src/pages/features/useLoadFeature.ts | 29 +++++++ ui/src/parsers/feastFeatures.ts | 11 +++ ui/src/parsers/jsonType.ts | 11 +++ .../FeatureCustomTabLoadingWrapper.tsx | 37 +++++++++ 19 files changed, 556 insertions(+), 13 deletions(-) create mode 100644 ui/src/custom-tabs/feature-demo-tab/DemoCustomTab.tsx create mode 100644 ui/src/custom-tabs/feature-demo-tab/useDemoQuery.tsx create mode 100644 ui/src/graphics/FeatureIcon.tsx create mode 100644 ui/src/pages/features/FeatureInstance.tsx create mode 100644 ui/src/pages/features/FeatureOverviewTab.tsx create mode 100644 ui/src/pages/features/FeatureRawData.tsx create mode 100644 ui/src/pages/features/useLoadFeature.ts create mode 100644 ui/src/parsers/feastFeatures.ts create mode 100644 ui/src/parsers/jsonType.ts create mode 100644 ui/src/utils/custom-tabs/FeatureCustomTabLoadingWrapper.tsx diff --git a/ui/src/FeastUISansProviders.test.tsx b/ui/src/FeastUISansProviders.test.tsx index 1289cea028..09985bc133 100644 --- a/ui/src/FeastUISansProviders.test.tsx +++ b/ui/src/FeastUISansProviders.test.tsx @@ -94,3 +94,46 @@ test("routes are reachable", async () => { }); } }); + + +const featureViewName = registry.featureViews[0].spec.name; +const featureName = registry.featureViews[0].spec.features[0].name; + +test("features are reachable", async () => { + render(); + + // Wait for content to load + await screen.findByText(/Explore this Project/i); + const routeRegExp = new RegExp("Feature Views", "i"); + + userEvent.click( + screen.getByRole("button", { name: routeRegExp }), + leftClick + ); + + screen.getByRole("heading", { + name: "Feature Views", + }); + + await screen.findAllByText(/Feature Views/i); + const fvRegExp = new RegExp(featureViewName, "i"); + + userEvent.click( + screen.getByRole("link", { name: fvRegExp }), + leftClick + ) + + await screen.findByText(featureName); + 
const fRegExp = new RegExp(featureName, "i"); + + userEvent.click( + screen.getByRole("link", { name: fRegExp }), + leftClick + ) + // Should land on a page with the heading + // await screen.findByText("Feature: " + featureName); + screen.getByRole("heading", { + name: "Feature: " + featureName, + level: 1, + }); +}); diff --git a/ui/src/FeastUISansProviders.tsx b/ui/src/FeastUISansProviders.tsx index 628068f0f0..8a0e0b94db 100644 --- a/ui/src/FeastUISansProviders.tsx +++ b/ui/src/FeastUISansProviders.tsx @@ -13,6 +13,7 @@ import DatasourceIndex from "./pages/data-sources/Index"; import DatasetIndex from "./pages/saved-data-sets/Index"; import EntityIndex from "./pages/entities/Index"; import EntityInstance from "./pages/entities/EntityInstance"; +import FeatureInstance from "./pages/features/FeatureInstance"; import FeatureServiceIndex from "./pages/feature-services/Index"; import FeatureViewIndex from "./pages/feature-views/Index"; import FeatureViewInstance from "./pages/feature-views/FeatureViewInstance"; @@ -86,10 +87,12 @@ const FeastUISansProviders = ({ path="feature-view/" element={} /> + }> + } - /> + path="feature-view/:FeatureViewName/feature/:FeatureName/*" + element={} + /> } diff --git a/ui/src/components/FeaturesListDisplay.tsx b/ui/src/components/FeaturesListDisplay.tsx index abd9c1d2e4..dcb6ba81eb 100644 --- a/ui/src/components/FeaturesListDisplay.tsx +++ b/ui/src/components/FeaturesListDisplay.tsx @@ -4,25 +4,42 @@ import { FeastFeatureColumnType } from "../parsers/feastFeatureViews"; import useLoadFeatureViewSummaryStatistics from "../queries/useLoadFeatureViewSummaryStatistics"; import SparklineHistogram from "./SparklineHistogram"; import FeatureFlagsContext from "../contexts/FeatureFlagsContext"; +import EuiCustomLink from "./EuiCustomLink"; interface FeaturesListProps { + projectName: string; featureViewName: string; features: FeastFeatureColumnType[]; + link: boolean; } -const FeaturesList = ({ featureViewName, features }: 
FeaturesListProps) => { +const FeaturesList = ({ projectName, featureViewName, features, link }: FeaturesListProps) => { const { enabledFeatureStatistics } = useContext(FeatureFlagsContext); const { isLoading, isError, isSuccess, data } = useLoadFeatureViewSummaryStatistics(featureViewName); let columns: { name: string; render?: any; field: any }[] = [ - { name: "Name", field: "name" }, + { + name: "Name", + field: "name", + render: (item: string) => ( + + {item} + + ) + }, { name: "Value Type", field: "valueType", }, ]; + if (!link) { + columns[0].render = undefined; + } + if (enabledFeatureStatistics) { columns.push( ...[ diff --git a/ui/src/components/TagSearch.tsx b/ui/src/components/TagSearch.tsx index e89d4a44cc..e3f7cdd98f 100644 --- a/ui/src/components/TagSearch.tsx +++ b/ui/src/components/TagSearch.tsx @@ -163,7 +163,7 @@ const TagSearch = ({ // HTMLInputElement is hooked into useInputHack inputNode.current = node; }, - onfocus: () => { + onFocus: () => { setHasFocus(true); }, fullWidth: true, diff --git a/ui/src/custom-tabs/TabsRegistryContext.tsx b/ui/src/custom-tabs/TabsRegistryContext.tsx index a5321e9c40..9f493e6d11 100644 --- a/ui/src/custom-tabs/TabsRegistryContext.tsx +++ b/ui/src/custom-tabs/TabsRegistryContext.tsx @@ -11,6 +11,7 @@ import { import RegularFeatureViewCustomTabLoadingWrapper from "../utils/custom-tabs/RegularFeatureViewCustomTabLoadingWrapper"; import OnDemandFeatureViewCustomTabLoadingWrapper from "../utils/custom-tabs/OnDemandFeatureViewCustomTabLoadingWrapper"; import FeatureServiceCustomTabLoadingWrapper from "../utils/custom-tabs/FeatureServiceCustomTabLoadingWrapper"; +import FeatureCustomTabLoadingWrapper from "../utils/custom-tabs/FeatureCustomTabLoadingWrapper"; import DataSourceCustomTabLoadingWrapper from "../utils/custom-tabs/DataSourceCustomTabLoadingWrapper"; import EntityCustomTabLoadingWrapper from "../utils/custom-tabs/EntityCustomTabLoadingWrapper"; import DatasetCustomTabLoadingWrapper from 
"../utils/custom-tabs/DatasetCustomTabLoadingWrapper"; @@ -19,6 +20,7 @@ import { RegularFeatureViewCustomTabRegistrationInterface, OnDemandFeatureViewCustomTabRegistrationInterface, FeatureServiceCustomTabRegistrationInterface, + FeatureCustomTabRegistrationInterface, DataSourceCustomTabRegistrationInterface, EntityCustomTabRegistrationInterface, DatasetCustomTabRegistrationInterface, @@ -29,6 +31,7 @@ interface FeastTabsRegistryInterface { RegularFeatureViewCustomTabs?: RegularFeatureViewCustomTabRegistrationInterface[]; OnDemandFeatureViewCustomTabs?: OnDemandFeatureViewCustomTabRegistrationInterface[]; FeatureServiceCustomTabs?: FeatureServiceCustomTabRegistrationInterface[]; + FeatureCustomTabs?: FeatureCustomTabRegistrationInterface[]; DataSourceCustomTabs?: DataSourceCustomTabRegistrationInterface[]; EntityCustomTabs?: EntityCustomTabRegistrationInterface[]; DatasetCustomTabs?: DatasetCustomTabRegistrationInterface[]; @@ -154,6 +157,15 @@ const useFeatureServiceCustomTabs = (navigate: NavigateFunction) => { ); }; +const useFeatureCustomTabs = (navigate: NavigateFunction) => { + const { FeatureCustomTabs } = React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + FeatureCustomTabs || [], + navigate + ); +}; + const useDataSourceCustomTabs = (navigate: NavigateFunction) => { const { DataSourceCustomTabs } = React.useContext(TabsRegistryContext); @@ -211,6 +223,15 @@ const useFeatureServiceCustomTabRoutes = () => { ); }; +const useEntityCustomTabRoutes = () => { + const { EntityCustomTabs } = React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + EntityCustomTabs || [], + EntityCustomTabLoadingWrapper + ); +}; + const useDataSourceCustomTabRoutes = () => { const { DataSourceCustomTabs } = React.useContext(TabsRegistryContext); @@ -220,12 +241,12 @@ const useDataSourceCustomTabRoutes = () => { ); }; -const useEntityCustomTabRoutes = () => { - const { EntityCustomTabs } = React.useContext(TabsRegistryContext); 
+const useFeatureCustomTabRoutes = () => { + const { FeatureCustomTabs } = React.useContext(TabsRegistryContext); return genericCustomTabRoutes( - EntityCustomTabs || [], - EntityCustomTabLoadingWrapper + FeatureCustomTabs || [], + FeatureCustomTabLoadingWrapper ); }; @@ -244,6 +265,7 @@ export { useRegularFeatureViewCustomTabs, useOnDemandFeatureViewCustomTabs, useFeatureServiceCustomTabs, + useFeatureCustomTabs, useDataSourceCustomTabs, useEntityCustomTabs, useDatasetCustomTabs, @@ -251,6 +273,7 @@ export { useRegularFeatureViewCustomTabRoutes, useOnDemandFeatureViewCustomTabRoutes, useFeatureServiceCustomTabRoutes, + useFeatureCustomTabRoutes, useDataSourceCustomTabRoutes, useEntityCustomTabRoutes, useDatasetCustomTabRoutes, diff --git a/ui/src/custom-tabs/feature-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/feature-demo-tab/DemoCustomTab.tsx new file mode 100644 index 0000000000..fda920daf3 --- /dev/null +++ b/ui/src/custom-tabs/feature-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,83 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + FeatureCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ id, feastObjectQuery }: FeatureCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. + // See: https://react-query.tanstack.com/guides/queries + + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+ There was an error loading the Dashboard application. Contact your + administrator for help. +

+ } + /> + ); + } + + // Feast UI uses the Elastic UI component system. + // and are particularly + // useful for layouts. + return ( + + + +

Hello World. The following is fetched data.

+ + {isSuccess && data && ( + +
{JSON.stringify(data, null, 2)}
+
+ )} +
+ +

... and this is data from Feast UI’s own query.

+ + {feastObjectQuery.isSuccess && feastObjectQuery.featureData && ( + +
{JSON.stringify(feastObjectQuery.featureData, null, 2)}
+
+ )} +
+
+
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/feature-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/feature-demo-tab/useDemoQuery.tsx new file mode 100644 index 0000000000..b93602dbe3 --- /dev/null +++ b/ui/src/custom-tabs/feature-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. 
+ return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/types.ts b/ui/src/custom-tabs/types.ts index f80c56d0e2..1e555d6185 100644 --- a/ui/src/custom-tabs/types.ts +++ b/ui/src/custom-tabs/types.ts @@ -2,6 +2,7 @@ import { useLoadOnDemandFeatureView, useLoadRegularFeatureView, } from "../pages/feature-views/useLoadFeatureView"; +import useLoadFeature from "../pages/features/useLoadFeature"; import useLoadFeatureService from "../pages/feature-services/useLoadFeatureService"; import useLoadDataSource from "../pages/data-sources/useLoadDataSource"; import useLoadEntity from "../pages/entities/useLoadEntity"; @@ -47,7 +48,7 @@ interface OnDemandFeatureViewCustomTabRegistrationInterface }: OnDemandFeatureViewCustomTabProps) => JSX.Element; } -// Type for Feature Service Custom Tabs +// Type for Entity Custom Tabs interface EntityCustomTabProps { id: string | undefined; feastObjectQuery: ReturnType; @@ -61,6 +62,21 @@ interface EntityCustomTabRegistrationInterface }: EntityCustomTabProps) => JSX.Element; } +// Type for Feature Custom Tabs +interface FeatureCustomTabProps { + id: string | undefined; + feastObjectQuery: ReturnType; +} +interface FeatureCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: FeatureCustomTabProps) => JSX.Element; +} + + // Type for Feature Service Custom Tabs interface FeatureServiceCustomTabProps { id: string | undefined; @@ -117,6 +133,8 @@ export type { DataSourceCustomTabProps, EntityCustomTabRegistrationInterface, EntityCustomTabProps, + 
FeatureCustomTabRegistrationInterface, + FeatureCustomTabProps, DatasetCustomTabRegistrationInterface, DatasetCustomTabProps, }; diff --git a/ui/src/graphics/FeatureIcon.tsx b/ui/src/graphics/FeatureIcon.tsx new file mode 100644 index 0000000000..e2e06749bc --- /dev/null +++ b/ui/src/graphics/FeatureIcon.tsx @@ -0,0 +1,52 @@ +import React from "react"; + +const FeatureIcon = ({ + size, + className, +}: { + size: number; + className?: string; +}) => { + return ( + + + + + + + + ); +}; + +const FeatureIcon16 = () => { + return ; +}; + +const FeatureIcon32 = () => { + return ( + + ); +}; + +export { FeatureIcon, FeatureIcon16, FeatureIcon32 }; diff --git a/ui/src/index.tsx b/ui/src/index.tsx index 3a6269a8b7..8cd73cf094 100644 --- a/ui/src/index.tsx +++ b/ui/src/index.tsx @@ -21,6 +21,7 @@ import FSDemoCustomTab from "./custom-tabs/feature-service-demo-tab/DemoCustomTa import DSDemoCustomTab from "./custom-tabs/data-source-demo-tab/DemoCustomTab"; import EntDemoCustomTab from "./custom-tabs/entity-demo-tab/DemoCustomTab"; import DatasetDemoCustomTab from "./custom-tabs/dataset-demo-tab/DemoCustomTab"; +import FDemoCustomTab from "./custom-tabs/feature-demo-tab/DemoCustomTab"; const queryClient = new QueryClient(); @@ -67,6 +68,13 @@ const tabsRegistry = { Component: DatasetDemoCustomTab, }, ], + FeatureCustomTabs: [ + { + label: "Custom Tab Demo", + path: "demo-tab", + Component: FDemoCustomTab, + }, + ], }; ReactDOM.render( diff --git a/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx index 1ea509d8df..0922f62102 100644 --- a/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx +++ b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx @@ -15,6 +15,7 @@ import { RequestDataSourceType, FeatureViewProjectionType, } from "../../parsers/feastODFVS"; +import { useParams } from "react-router-dom"; import { EntityRelation } from "../../parsers/parseEntityRelationships"; import { 
FEAST_FCO_TYPES } from "../../parsers/types"; import useLoadRelationshipData from "../../queries/useLoadRelationshipsData"; @@ -39,6 +40,7 @@ const OnDemandFeatureViewOverviewTab = ({ data, }: OnDemandFeatureViewOverviewTabProps) => { const inputs = Object.entries(data.spec.sources); + const { projectName } = useParams(); const relationshipQuery = useLoadRelationshipData(); const fsNames = relationshipQuery.data @@ -71,10 +73,12 @@ const OnDemandFeatureViewOverviewTab = ({

Features ({data.spec.features.length})

- {data.spec.features ? ( + {projectName && data.spec.features ? ( ) : ( No Tags sepcified on this feature view. diff --git a/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx index d284d697e8..689bc6b902 100644 --- a/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx +++ b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx @@ -69,10 +69,12 @@ const RegularFeatureViewOverviewTab = ({

Features ({data.spec.features.length})

- {data.spec.features ? ( + {projectName && data.spec.features ? ( ) : ( No features specified on this feature view. diff --git a/ui/src/pages/features/FeatureInstance.tsx b/ui/src/pages/features/FeatureInstance.tsx new file mode 100644 index 0000000000..6eb7d0f2d6 --- /dev/null +++ b/ui/src/pages/features/FeatureInstance.tsx @@ -0,0 +1,62 @@ +import React from "react"; +import { Route, Routes, useNavigate, useParams } from "react-router-dom"; +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, +} from "@elastic/eui"; + +import { FeatureIcon32 } from "../../graphics/FeatureIcon"; +import { useMatchExact } from "../../hooks/useMatchSubpath"; +import FeatureOverviewTab from "./FeatureOverviewTab"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import { + useFeatureCustomTabs, + useFeatureCustomTabRoutes, +} from "../../custom-tabs/TabsRegistryContext"; + +const FeatureInstance = () => { + const navigate = useNavigate(); + let { FeatureViewName, FeatureName } = useParams(); + + const { customNavigationTabs } = useFeatureCustomTabs(navigate); + const CustomTabRoutes = useFeatureCustomTabRoutes(); + + useDocumentTitle(`${FeatureName} | ${FeatureViewName} | Feast`); + + return ( + + { + navigate(""); + }, + }, + ...customNavigationTabs, + ]} + /> + + + + } /> + {CustomTabRoutes} + + + + + ); +}; + +export default FeatureInstance; diff --git a/ui/src/pages/features/FeatureOverviewTab.tsx b/ui/src/pages/features/FeatureOverviewTab.tsx new file mode 100644 index 0000000000..0a1c48509c --- /dev/null +++ b/ui/src/pages/features/FeatureOverviewTab.tsx @@ -0,0 +1,71 @@ +import { + EuiFlexGroup, + EuiHorizontalRule, + EuiLoadingSpinner, + EuiTitle, + EuiPanel, + EuiFlexItem, + EuiDescriptionList, + EuiDescriptionListTitle, + EuiDescriptionListDescription, +} from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import React from "react"; +import { useParams } from "react-router-dom"; +import useLoadFeature from 
"./useLoadFeature"; + +const FeatureOverviewTab = () => { + let { projectName, FeatureViewName, FeatureName } = useParams(); + + const eName = FeatureViewName === undefined ? "" : FeatureViewName; + const fName = FeatureName === undefined ? "" : FeatureName; + const { isLoading, isSuccess, isError, data, featureData } = useLoadFeature(eName, fName); + const isEmpty = data === undefined || featureData === undefined; + + return ( + + {isLoading && ( + + Loading + + )} + {isEmpty &&

No Feature with name {FeatureName} in FeatureView {FeatureViewName}

} + {isError &&

Error loading Feature {FeatureName} in FeatureView {FeatureViewName}

} + {isSuccess && data && ( + + + + + +

Properties

+
+ + + Name + + {featureData?.name} + + + Value Type + + {featureData?.valueType} + + + FeatureView + + + {FeatureViewName} + + + +
+
+
+
+ )} +
+ ); +}; +export default FeatureOverviewTab; diff --git a/ui/src/pages/features/FeatureRawData.tsx b/ui/src/pages/features/FeatureRawData.tsx new file mode 100644 index 0000000000..efbe29d431 --- /dev/null +++ b/ui/src/pages/features/FeatureRawData.tsx @@ -0,0 +1,25 @@ +import React from "react"; +import { EuiPanel } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadFeature from "./useLoadFeature"; + +const FeatureRawData = () => { + let { FeatureViewName, FeatureName } = useParams(); + + const eName = FeatureViewName === undefined ? "" : FeatureViewName; + const fName = FeatureName === undefined ? "" : FeatureName; + + const { isSuccess, data } = useLoadFeature(eName, fName); + + return isSuccess && data ? ( + +
{JSON.stringify(data, null, 2)}
+
+ ) : ( + + No data so sad ;-; + + ); +}; + +export default FeatureRawData; diff --git a/ui/src/pages/features/useLoadFeature.ts b/ui/src/pages/features/useLoadFeature.ts new file mode 100644 index 0000000000..5ddaf28204 --- /dev/null +++ b/ui/src/pages/features/useLoadFeature.ts @@ -0,0 +1,29 @@ +import { useContext } from "react"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import useLoadRegistry from "../../queries/useLoadRegistry"; + +const useLoadFeature = (featureViewName: string, featureName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.featureViews?.find((fv) => { + return fv.spec.name === featureViewName; + }); + + const featureData = + data === undefined + ? undefined + : data?.spec.features.find((f) => { + return f.name === featureName; + }); + + return { + ...registryQuery, + featureData, + }; +}; + +export default useLoadFeature; diff --git a/ui/src/parsers/feastFeatures.ts b/ui/src/parsers/feastFeatures.ts new file mode 100644 index 0000000000..129120c168 --- /dev/null +++ b/ui/src/parsers/feastFeatures.ts @@ -0,0 +1,11 @@ +import { z } from "zod"; +import { FEAST_FEATURE_VALUE_TYPES } from "./types"; +import { jsonSchema } from "./jsonType" + +const FeastFeatureSchema = z.object({ + name: z.string(), + valueType: z.nativeEnum(FEAST_FEATURE_VALUE_TYPES), + metadata: jsonSchema.optional(), +}); + +export { FeastFeatureSchema }; diff --git a/ui/src/parsers/jsonType.ts b/ui/src/parsers/jsonType.ts new file mode 100644 index 0000000000..be484b5477 --- /dev/null +++ b/ui/src/parsers/jsonType.ts @@ -0,0 +1,11 @@ +import { z } from "zod"; + +// Taken from the zod documentation code - accepts any JSON object. 
+const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()]); +type Literal = z.infer; +type Json = Literal | { [key: string]: Json } | Json[]; +const jsonSchema: z.ZodType = z.lazy(() => + z.union([literalSchema, z.array(jsonSchema), z.record(jsonSchema)]) +); + +export { jsonSchema }; diff --git a/ui/src/utils/custom-tabs/FeatureCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/FeatureCustomTabLoadingWrapper.tsx new file mode 100644 index 0000000000..7880f82490 --- /dev/null +++ b/ui/src/utils/custom-tabs/FeatureCustomTabLoadingWrapper.tsx @@ -0,0 +1,37 @@ +import React from "react"; +import { useParams } from "react-router-dom"; + +import { FeatureCustomTabProps } from "../../custom-tabs/types"; +import useLoadFeature from "../../pages/features/useLoadFeature"; + +interface FeatureCustomTabLoadingWrapperProps { + Component: (props: FeatureCustomTabProps) => JSX.Element; +} + +const FeatureCustomTabLoadingWrapper = ({ + Component, +}: FeatureCustomTabLoadingWrapperProps) => { + console.log(useParams()); + const { FeatureViewName, FeatureName } = useParams(); + + if (!FeatureViewName) { + throw new Error( + `This route has no 'FeatureViewName' part. This route is likely not supposed to render this component.` + ); + } + + if (!FeatureName) { + throw new Error( + `This route has no 'FeatureName' part. This route is likely not supposed to render this component.` + ); + } + + const feastObjectQuery = useLoadFeature(FeatureViewName, FeatureName); + + // do I include FeatureViewName in this? 
+ return ( + + ); +}; + +export default FeatureCustomTabLoadingWrapper; From 0ceb39c276c5eba08014acb2f5beb03fedc0a700 Mon Sep 17 00:00:00 2001 From: sfc-gh-madkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Sat, 2 Jul 2022 13:45:24 -0500 Subject: [PATCH 09/73] fix snowflake testing (#2903) Signed-off-by: Miles Adkins --- .../tests/integration/feature_repos/repo_configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 6f40d3171b..a168f4f028 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -78,7 +78,7 @@ "file": ("local", FileDataSourceCreator), "bigquery": ("gcp", BigQueryDataSourceCreator), "redshift": ("aws", RedshiftDataSourceCreator), - "snowflake": ("aws", RedshiftDataSourceCreator), + "snowflake": ("aws", SnowflakeDataSourceCreator), } AVAILABLE_OFFLINE_STORES: List[Tuple[str, Type[DataSourceCreator]]] = [ From f758f9e148212d08f63df155e864940c27d92155 Mon Sep 17 00:00:00 2001 From: sfc-gh-madkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Sat, 2 Jul 2022 21:44:39 -0500 Subject: [PATCH 10/73] feat: Add snowflake online store (#2902) * feat: Add snowflake online store Signed-off-by: Miles Adkins * lint/format Signed-off-by: Miles Adkins * removing missing testing env variables Signed-off-by: Miles Adkins * test offline store first Signed-off-by: Miles Adkins * snowflake online test fixes Signed-off-by: Miles Adkins * format Signed-off-by: Miles Adkins * fix snowflake testing (#2903) Signed-off-by: Miles Adkins --- .../feast/infra/online_stores/snowflake.py | 232 ++++++++++++++++++ .../feast/infra/utils/snowflake_utils.py | 181 +++++++++++++- .../feature_repos/repo_configuration.py | 12 + 3 files changed, 423 insertions(+), 2 deletions(-) create mode 100644 
sdk/python/feast/infra/online_stores/snowflake.py diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py new file mode 100644 index 0000000000..80074cf509 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/snowflake.py @@ -0,0 +1,232 @@ +import itertools +import os +from binascii import hexlify +from datetime import datetime +from pathlib import Path +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple + +import pandas as pd +import pytz +from pydantic import Field +from pydantic.schema import Literal + +from feast import Entity, FeatureView +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.infra.online_stores.online_store import OnlineStore +from feast.infra.utils.snowflake_utils import get_snowflake_conn, write_pandas_binary +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.usage import log_exceptions_and_usage + + +class SnowflakeOnlineStoreConfig(FeastConfigBaseModel): + """ Online store config for Snowflake """ + + type: Literal["snowflake.online"] = "snowflake.online" + """ Online store type selector""" + + config_path: Optional[str] = ( + Path(os.environ["HOME"]) / ".snowsql/config" + ).__str__() + """ Snowflake config path -- absolute path required (Can't use ~)""" + + account: Optional[str] = None + """ Snowflake deployment identifier -- drop .snowflakecomputing.com""" + + user: Optional[str] = None + """ Snowflake user name """ + + password: Optional[str] = None + """ Snowflake password """ + + role: Optional[str] = None + """ Snowflake role name""" + + warehouse: Optional[str] = None + """ Snowflake warehouse name """ + + database: Optional[str] = None + """ Snowflake database name """ + + schema_: Optional[str] = Field("PUBLIC", alias="schema") + """ Snowflake schema name 
""" + + class Config: + allow_population_by_field_name = True + + +class SnowflakeOnlineStore(OnlineStore): + @log_exceptions_and_usage(online_store="snowflake") + def online_write_batch( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]], + ) -> None: + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + dfs = [None] * len(data) + for i, (entity_key, values, timestamp, created_ts) in enumerate(data): + + df = pd.DataFrame( + columns=[ + "entity_feature_key", + "entity_key", + "feature_name", + "value", + "event_ts", + "created_ts", + ], + index=range(0, len(values)), + ) + + timestamp = _to_naive_utc(timestamp) + if created_ts is not None: + created_ts = _to_naive_utc(created_ts) + + for j, (feature_name, val) in enumerate(values.items()): + df.loc[j, "entity_feature_key"] = serialize_entity_key( + entity_key + ) + bytes(feature_name, encoding="utf-8") + df.loc[j, "entity_key"] = serialize_entity_key(entity_key) + df.loc[j, "feature_name"] = feature_name + df.loc[j, "value"] = val.SerializeToString() + df.loc[j, "event_ts"] = timestamp + df.loc[j, "created_ts"] = created_ts + + dfs[i] = df + if progress: + progress(1) + + if dfs: + agg_df = pd.concat(dfs) + + with get_snowflake_conn(config.online_store, autocommit=False) as conn: + + write_pandas_binary(conn, agg_df, f"{config.project}_{table.name}") + + query = f""" + INSERT OVERWRITE INTO "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}" + SELECT + "entity_feature_key", + "entity_key", + "feature_name", + "value", + "event_ts", + "created_ts" + FROM + (SELECT + *, + ROW_NUMBER() OVER(PARTITION BY "entity_key","feature_name" ORDER BY "event_ts" DESC, "created_ts" DESC) AS "_feast_row" + FROM + "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}") + WHERE + 
"_feast_row" = 1; + """ + + conn.cursor().execute(query) + + return None + + @log_exceptions_and_usage(online_store="snowflake") + def online_read( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: List[str], + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + + with get_snowflake_conn(config.online_store) as conn: + + df = ( + conn.cursor() + .execute( + f""" + SELECT + "entity_key", "feature_name", "value", "event_ts" + FROM + "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}" + WHERE + "entity_feature_key" IN ({','.join([('TO_BINARY('+hexlify(serialize_entity_key(combo[0])+bytes(combo[1], encoding='utf-8')).__str__()[1:]+")") for combo in itertools.product(entity_keys,requested_features)])}) + """, + ) + .fetch_pandas_all() + ) + + for entity_key in entity_keys: + entity_key_bin = serialize_entity_key(entity_key) + res = {} + res_ts = None + for index, row in df[df["entity_key"] == entity_key_bin].iterrows(): + val = ValueProto() + val.ParseFromString(row["value"]) + res[row["feature_name"]] = val + res_ts = row["event_ts"].to_pydatetime() + + if not res: + result.append((None, None)) + else: + result.append((res_ts, res)) + return result + + @log_exceptions_and_usage(online_store="snowflake") + def update( + self, + config: RepoConfig, + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + partial: bool, + ): + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + with get_snowflake_conn(config.online_store) as conn: + + for table in tables_to_keep: + + conn.cursor().execute( + f"""CREATE TABLE IF NOT EXISTS 
"{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}" ( + "entity_feature_key" BINARY, + "entity_key" BINARY, + "feature_name" VARCHAR, + "value" BINARY, + "event_ts" TIMESTAMP, + "created_ts" TIMESTAMP + )""" + ) + + for table in tables_to_delete: + + conn.cursor().execute( + f'DROP TABLE IF EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}"' + ) + + def teardown( + self, + config: RepoConfig, + tables: Sequence[FeatureView], + entities: Sequence[Entity], + ): + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + with get_snowflake_conn(config.online_store) as conn: + + for table in tables: + query = f'DROP TABLE IF EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}"' + conn.cursor().execute(query) + + +def _to_naive_utc(ts: datetime): + if ts.tzinfo is None: + return ts + else: + return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/utils/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake_utils.py index 05834ae436..d9be930439 100644 --- a/sdk/python/feast/infra/utils/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake_utils.py @@ -44,8 +44,12 @@ def execute_snowflake_statement(conn: SnowflakeConnection, query) -> SnowflakeCu def get_snowflake_conn(config, autocommit=True) -> SnowflakeConnection: - assert config.type == "snowflake.offline" - config_header = "connections.feast_offline_store" + assert config.type in ["snowflake.offline", "snowflake.online"] + + if config.type == "snowflake.offline": + config_header = "connections.feast_offline_store" + elif config.type == "snowflake.online": + config_header = "connections.feast_online_store" config_dict = dict(config) @@ -429,3 +433,176 @@ def parse_private_key_path(key_path: str, private_key_passphrase: str) -> bytes: ) return pkb + + +def write_pandas_binary( + conn: SnowflakeConnection, + df: 
pd.DataFrame, + table_name: str, + database: Optional[str] = None, + schema: Optional[str] = None, + chunk_size: Optional[int] = None, + compression: str = "gzip", + on_error: str = "abort_statement", + parallel: int = 4, + quote_identifiers: bool = True, + auto_create_table: bool = False, + create_temp_table: bool = False, +): + """Allows users to most efficiently write back a pandas DataFrame to Snowflake. + + It works by dumping the DataFrame into Parquet files, uploading them and finally copying their data into the table. + + Returns whether all files were ingested correctly, number of chunks uploaded, and number of rows ingested + with all of the COPY INTO command's output for debugging purposes. + + Example usage: + import pandas + from snowflake.connector.pandas_tools import write_pandas + + df = pandas.DataFrame([('Mark', 10), ('Luke', 20)], columns=['name', 'balance']) + success, nchunks, nrows, _ = write_pandas(cnx, df, 'customers') + + Args: + conn: Connection to be used to communicate with Snowflake. + df: Dataframe we'd like to write back. + table_name: Table name where we want to insert into. + database: Database schema and table is in, if not provided the default one will be used (Default value = None). + schema: Schema table is in, if not provided the default one will be used (Default value = None). + chunk_size: Number of elements to be inserted once, if not provided all elements will be dumped once + (Default value = None). + compression: The compression used on the Parquet files, can only be gzip, or snappy. Gzip gives supposedly a + better compression, while snappy is faster. Use whichever is more appropriate (Default value = 'gzip'). + on_error: Action to take when COPY INTO statements fail, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#copy-options-copyoptions + (Default value = 'abort_statement'). 
+ parallel: Number of threads to be used when uploading chunks, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/put.html#optional-parameters (Default value = 4). + quote_identifiers: By default, identifiers, specifically database, schema, table and column names + (from df.columns) will be quoted. If set to False, identifiers are passed on to Snowflake without quoting. + I.e. identifiers will be coerced to uppercase by Snowflake. (Default value = True) + auto_create_table: When true, will automatically create a table with corresponding columns for each column in + the passed in DataFrame. The table will not be created if it already exists + create_temp_table: Will make the auto-created table as a temporary table + """ + if database is not None and schema is None: + raise ProgrammingError( + "Schema has to be provided to write_pandas when a database is provided" + ) + # This dictionary maps the compression algorithm to Snowflake put copy into command type + # https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#type-parquet + compression_map = {"gzip": "auto", "snappy": "snappy"} + if compression not in compression_map.keys(): + raise ProgrammingError( + "Invalid compression '{}', only acceptable values are: {}".format( + compression, compression_map.keys() + ) + ) + if quote_identifiers: + location = ( + (('"' + database + '".') if database else "") + + (('"' + schema + '".') if schema else "") + + ('"' + table_name + '"') + ) + else: + location = ( + (database + "." if database else "") + + (schema + "." 
if schema else "") + + (table_name) + ) + if chunk_size is None: + chunk_size = len(df) + cursor: SnowflakeCursor = conn.cursor() + stage_name = create_temporary_sfc_stage(cursor) + + with TemporaryDirectory() as tmp_folder: + for i, chunk in chunk_helper(df, chunk_size): + chunk_path = os.path.join(tmp_folder, "file{}.txt".format(i)) + # Dump chunk into parquet file + chunk.to_parquet( + chunk_path, + compression=compression, + use_deprecated_int96_timestamps=True, + ) + # Upload parquet file + upload_sql = ( + "PUT /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" + ).format( + path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"), + stage_name=stage_name, + parallel=parallel, + ) + logger.debug(f"uploading files with '{upload_sql}'") + cursor.execute(upload_sql, _is_internal=True) + # Remove chunk file + os.remove(chunk_path) + if quote_identifiers: + columns = '"' + '","'.join(list(df.columns)) + '"' + else: + columns = ",".join(list(df.columns)) + + if auto_create_table: + file_format_name = create_file_format(compression, compression_map, cursor) + infer_schema_sql = f"SELECT COLUMN_NAME, TYPE FROM table(infer_schema(location=>'@\"{stage_name}\"', file_format=>'{file_format_name}'))" + logger.debug(f"inferring schema with '{infer_schema_sql}'") + result_cursor = cursor.execute(infer_schema_sql, _is_internal=True) + if result_cursor is None: + raise SnowflakeQueryUnknownError(infer_schema_sql) + result = cast(List[Tuple[str, str]], result_cursor.fetchall()) + column_type_mapping: Dict[str, str] = dict(result) + # Infer schema can return the columns out of order depending on the chunking we do when uploading + # so we have to iterate through the dataframe columns to make sure we create the table with its + # columns in order + quote = '"' if quote_identifiers else "" + create_table_columns = ", ".join( + [f"{quote}{c}{quote} {column_type_mapping[c]}" for c in df.columns] + ) + 
create_table_sql = ( + f"CREATE {'TEMP ' if create_temp_table else ''}TABLE IF NOT EXISTS {location} " + f"({create_table_columns})" + f" /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + ) + logger.debug(f"auto creating table with '{create_table_sql}'") + cursor.execute(create_table_sql, _is_internal=True) + drop_file_format_sql = f"DROP FILE FORMAT IF EXISTS {file_format_name}" + logger.debug(f"dropping file format with '{drop_file_format_sql}'") + cursor.execute(drop_file_format_sql, _is_internal=True) + + # in Snowflake, all parquet data is stored in a single column, $1, so we must select columns explicitly + # see (https://docs.snowflake.com/en/user-guide/script-data-load-transform-parquet.html) + if quote_identifiers: + parquet_columns = ",".join( + f'TO_BINARY($1:"{c}")' + if c in ["entity_feature_key", "entity_key", "value"] + else f'$1:"{c}"' + for c in df.columns + ) + else: + parquet_columns = ",".join( + f"TO_BINARY($1:{c})" + if c in ["entity_feature_key", "entity_key", "value"] + else f"$1:{c}" + for c in df.columns + ) + + copy_into_sql = ( + "COPY INTO {location} /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "({columns}) " + 'FROM (SELECT {parquet_columns} FROM @"{stage_name}") ' + "FILE_FORMAT=(TYPE=PARQUET COMPRESSION={compression} BINARY_AS_TEXT = FALSE) " + "PURGE=TRUE ON_ERROR={on_error}" + ).format( + location=location, + columns=columns, + parquet_columns=parquet_columns, + stage_name=stage_name, + compression=compression_map[compression], + on_error=on_error, + ) + logger.debug("copying into with '{}'".format(copy_into_sql)) + # Snowflake returns the original cursor if the query execution succeeded. 
+ result_cursor = cursor.execute(copy_into_sql, _is_internal=True) + if result_cursor is None: + raise SnowflakeQueryUnknownError(copy_into_sql) + result_cursor.close() diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index a168f4f028..67e585839d 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -74,6 +74,17 @@ "connection_string": "127.0.0.1:6001,127.0.0.1:6002,127.0.0.1:6003", } +SNOWFLAKE_CONFIG = { + "type": "snowflake.online", + "account": os.environ["SNOWFLAKE_CI_DEPLOYMENT"], + "user": os.environ["SNOWFLAKE_CI_USER"], + "password": os.environ["SNOWFLAKE_CI_PASSWORD"], + "role": os.environ["SNOWFLAKE_CI_ROLE"], + "warehouse": os.environ["SNOWFLAKE_CI_WAREHOUSE"], + "database": "FEAST", + "schema": "ONLINE", +} + OFFLINE_STORE_TO_PROVIDER_CONFIG: Dict[str, DataSourceCreator] = { "file": ("local", FileDataSourceCreator), "bigquery": ("gcp", BigQueryDataSourceCreator), @@ -103,6 +114,7 @@ AVAILABLE_ONLINE_STORES["redis"] = (REDIS_CONFIG, None) AVAILABLE_ONLINE_STORES["dynamodb"] = (DYNAMO_CONFIG, None) AVAILABLE_ONLINE_STORES["datastore"] = ("datastore", None) + AVAILABLE_ONLINE_STORES["snowflake"] = (SNOWFLAKE_CONFIG, None) full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) From 8828240811b495d27ffde680b08bd0fcc2edba04 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Tue, 5 Jul 2022 10:05:10 -0700 Subject: [PATCH 11/73] chore: Change pytest fixtures to be function-scoped instead of session-scoped (#2899) * Test to trigger registry conflicts Signed-off-by: Felix Wang * Switch environment and associated fixtures to being function scoped Signed-off-by: Felix Wang * Format Signed-off-by: Felix Wang * Switch type tests to function-scoped fixtures Signed-off-by: Felix Wang * Format Signed-off-by: Felix Wang --- sdk/python/tests/conftest.py | 8 
+++---- .../integration/e2e/test_go_feature_server.py | 2 +- .../registration/test_universal_types.py | 8 ++----- .../tests/unit/test_registry_conflict.py | 23 +++++++++++++++++++ 4 files changed, 30 insertions(+), 11 deletions(-) create mode 100644 sdk/python/tests/unit/test_registry_conflict.py diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index bf69a85fa3..5fe9b5b699 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -161,7 +161,7 @@ def start_test_local_server(repo_path: str, port: int): fs.serve("localhost", port, no_access_log=True) -@pytest.fixture(scope="session") +@pytest.fixture def environment(request, worker_id): e = construct_test_environment( request.param, worker_id=worker_id, fixture_request=request @@ -293,7 +293,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc): ) -@pytest.fixture(scope="session") +@pytest.fixture def feature_server_endpoint(environment): if ( not environment.python_feature_server @@ -344,12 +344,12 @@ def _free_port(): return sock.getsockname()[1] -@pytest.fixture(scope="session") +@pytest.fixture def universal_data_sources(environment) -> TestData: return construct_universal_test_data(environment) -@pytest.fixture(scope="session") +@pytest.fixture def e2e_data_sources(environment: Environment): df = create_dataset() data_source = environment.data_source_creator.create_data_source( diff --git a/sdk/python/tests/integration/e2e/test_go_feature_server.py b/sdk/python/tests/integration/e2e/test_go_feature_server.py index 4fd003c194..465fa41769 100644 --- a/sdk/python/tests/integration/e2e/test_go_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_go_feature_server.py @@ -35,7 +35,7 @@ ) -@pytest.fixture(scope="session") +@pytest.fixture def initialized_registry(environment, universal_data_sources): fs = environment.feature_store diff --git a/sdk/python/tests/integration/registration/test_universal_types.py 
b/sdk/python/tests/integration/registration/test_universal_types.py index b03303f6ee..d5cf270b25 100644 --- a/sdk/python/tests/integration/registration/test_universal_types.py +++ b/sdk/python/tests/integration/registration/test_universal_types.py @@ -64,9 +64,7 @@ class TypeTestConfig: @pytest.fixture( - params=OFFLINE_TYPE_TEST_CONFIGS, - scope="session", - ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], + params=OFFLINE_TYPE_TEST_CONFIGS, ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], ) def offline_types_test_fixtures(request, environment): config: TypeTestConfig = request.param @@ -80,9 +78,7 @@ def offline_types_test_fixtures(request, environment): @pytest.fixture( - params=ONLINE_TYPE_TEST_CONFIGS, - scope="session", - ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], + params=ONLINE_TYPE_TEST_CONFIGS, ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], ) def online_types_test_fixtures(request, environment): return get_fixtures(request, environment) diff --git a/sdk/python/tests/unit/test_registry_conflict.py b/sdk/python/tests/unit/test_registry_conflict.py new file mode 100644 index 0000000000..12f666556a --- /dev/null +++ b/sdk/python/tests/unit/test_registry_conflict.py @@ -0,0 +1,23 @@ +import pytest + +from feast.entity import Entity + + +@pytest.mark.integration +def test_apply_first_entity(environment): + entity = Entity(name="first") + fs = environment.feature_store + fs.apply([entity]) + + entities = fs.list_entities() + assert len(entities) == 1 + + +@pytest.mark.integration +def test_apply_second_entity(environment): + entity = Entity(name="second") + fs = environment.feature_store + fs.apply([entity]) + + entities = fs.list_entities() + assert len(entities) == 1 From 0ec7d1abd3f509e17870ca168ece356382fb7fe9 Mon Sep 17 00:00:00 2001 From: Breno Costa <35263725+breno-costa@users.noreply.github.com> Date: Tue, 5 Jul 2022 19:06:08 +0200 Subject: [PATCH 12/73] fix: Change the feature store plan method to public modifier (#2904) Signed-off-by: Breno Costa 
--- sdk/python/feast/feature_store.py | 8 ++++---- sdk/python/feast/repo_operations.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 78431e2d61..e3c1251dad 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -511,8 +511,8 @@ def _get_features( return _feature_refs def _should_use_plan(self): - """Returns True if _plan and _apply_diffs should be used, False otherwise.""" - # Currently only the local provider with sqlite online store supports _plan and _apply_diffs. + """Returns True if plan and _apply_diffs should be used, False otherwise.""" + # Currently only the local provider with sqlite online store supports plan and _apply_diffs. return self.config.provider == "local" and ( self.config.online_store and self.config.online_store.type == "sqlite" ) @@ -636,7 +636,7 @@ def _get_feature_views_to_materialize( return feature_views_to_materialize @log_exceptions_and_usage - def _plan( + def plan( self, desired_repo_contents: RepoContents ) -> Tuple[RegistryDiff, InfraDiff, Infra]: """Dry-run registering objects to metadata store. @@ -670,7 +670,7 @@ def _plan( ... ttl=timedelta(seconds=86400 * 1), ... batch_source=driver_hourly_stats, ... ) - >>> registry_diff, infra_diff, new_infra = fs._plan(RepoContents( + >>> registry_diff, infra_diff, new_infra = fs.plan(RepoContents( ... data_sources=[driver_hourly_stats], ... feature_views=[driver_hourly_stats_view], ... 
on_demand_feature_views=list(), diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index 37daa6500e..9a5e64f8c3 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -183,7 +183,7 @@ def plan(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool) for data_source in data_sources: data_source.validate(store.config) - registry_diff, infra_diff, _ = store._plan(repo) + registry_diff, infra_diff, _ = store.plan(repo) click.echo(registry_diff.to_string()) click.echo(infra_diff.to_string()) @@ -262,7 +262,7 @@ def apply_total_with_repo_instance( for data_source in data_sources: data_source.validate(store.config) - registry_diff, infra_diff, new_infra = store._plan(repo) + registry_diff, infra_diff, new_infra = store.plan(repo) # For each object in the registry, determine whether it should be kept or deleted. ( From 0159f3875de7c8509c465346bd13dd11fba0d467 Mon Sep 17 00:00:00 2001 From: kindalime Date: Tue, 5 Jul 2022 12:05:08 -0700 Subject: [PATCH 13/73] feat: Add custom JSON table tab w/ formatting (#2851) * Add metadata tab functionality Signed-off-by: Daniel Kim * Undo unnecessary changes and bring back Regular FV Demo Tab Signed-off-by: Daniel Kim * Change metadata tab to accept custom JSON file input. 
Signed-off-by: Daniel Kim * Rename instances of metadata to data Signed-off-by: Daniel Kim --- ui/src/custom-tabs/data-tab/DataQuery.tsx | 25 ++++ ui/src/custom-tabs/data-tab/DataTab.tsx | 110 ++++++++++++++++++ .../reguar-fv-demo-tab/DemoCustomTab.tsx | 2 +- .../reguar-fv-demo-tab/useDemoQuery.tsx | 2 +- ui/src/index.tsx | 6 + .../useLoadFeatureViewSummaryStatistics.ts | 2 +- 6 files changed, 144 insertions(+), 3 deletions(-) create mode 100644 ui/src/custom-tabs/data-tab/DataQuery.tsx create mode 100644 ui/src/custom-tabs/data-tab/DataTab.tsx diff --git a/ui/src/custom-tabs/data-tab/DataQuery.tsx b/ui/src/custom-tabs/data-tab/DataQuery.tsx new file mode 100644 index 0000000000..f101c122e4 --- /dev/null +++ b/ui/src/custom-tabs/data-tab/DataQuery.tsx @@ -0,0 +1,25 @@ +import { useQuery } from "react-query"; + +interface DataQueryInterface { + featureView: string | undefined; +} + +const DataQuery = (featureView: string) => { + const queryKey = `data-tab-namespace:${featureView}`; + + return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default DataQuery; diff --git a/ui/src/custom-tabs/data-tab/DataTab.tsx b/ui/src/custom-tabs/data-tab/DataTab.tsx new file mode 100644 index 0000000000..144083420a --- /dev/null +++ b/ui/src/custom-tabs/data-tab/DataTab.tsx @@ -0,0 +1,110 @@ +import React from "react"; +import { z } from "zod"; +import { + EuiCode, + EuiFlexGroup, + EuiHorizontalRule, + EuiLoadingSpinner, + EuiTable, + EuiTitle, + EuiTableHeader, + EuiTableHeaderCell, + EuiPanel, + EuiFlexItem, + EuiTableRow, + EuiTableRowCell, +} from "@elastic/eui"; +import useLoadRegularFeatureView from "../../pages/feature-views/useLoadFeatureView"; +import DataQuery from "./DataQuery"; + +const FeatureViewDataRow = z.object({ + name: 
z.string(), + value: z.string(), +}); + +type FeatureViewDataRowType = z.infer; + +const LineHeightProp: React.CSSProperties = { + lineHeight: 1, +} + +const EuiFeatureViewDataRow = ({name, value}: FeatureViewDataRowType) => { + return ( + + + {name} + + + +
+            {value}
+          
+
+
+
+ ); +} + +const FeatureViewDataTable = (data: any) => { + var items: FeatureViewDataRowType[] = []; + + for (let element in data.data){ + const row: FeatureViewDataRowType = { + name: element, + value: JSON.stringify(data.data[element], null, 2), + }; + items.push(row); + console.log(row); + } + + return ( + + + + Data Item Name + + + Data Item Value + + + {items.map((item) => { + return + })} + + ) +} + +const DataTab = () => { + const fName = "credit_history" + const { isLoading, isError, isSuccess, data } = DataQuery(fName); + const isEmpty = data === undefined; + + return ( + + {isLoading && ( + + Loading + + )} + {isEmpty &&

No feature view with name: {fName}

} + {isError &&

Error loading feature view: {fName}

} + {isSuccess && data && ( + + + + + +

Properties

+
+ + +
+
+
+
+ )} +
+ ); +}; + +export default DataTab; diff --git a/ui/src/custom-tabs/reguar-fv-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/reguar-fv-demo-tab/DemoCustomTab.tsx index 2ce1b4e64b..4f8d7dfcb2 100644 --- a/ui/src/custom-tabs/reguar-fv-demo-tab/DemoCustomTab.tsx +++ b/ui/src/custom-tabs/reguar-fv-demo-tab/DemoCustomTab.tsx @@ -82,4 +82,4 @@ const DemoCustomTab = ({ ); }; -export default DemoCustomTab; +export default DemoCustomTab; \ No newline at end of file diff --git a/ui/src/custom-tabs/reguar-fv-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/reguar-fv-demo-tab/useDemoQuery.tsx index b93602dbe3..965d511539 100644 --- a/ui/src/custom-tabs/reguar-fv-demo-tab/useDemoQuery.tsx +++ b/ui/src/custom-tabs/reguar-fv-demo-tab/useDemoQuery.tsx @@ -41,4 +41,4 @@ const useDemoQuery = ({ featureView }: DemoQueryInterface) => { }; export default useDemoQuery; -export type { DemoDataType }; +export type { DemoDataType }; \ No newline at end of file diff --git a/ui/src/index.tsx b/ui/src/index.tsx index 8cd73cf094..2233b90c9e 100644 --- a/ui/src/index.tsx +++ b/ui/src/index.tsx @@ -15,6 +15,7 @@ import FeastUI from "./FeastUI"; // 3. Register the tab in the appropriate array below. Each entry // is a record with three keys: label, path, and Component. 
// Import your component and pass it as Component +import DataTab from "./custom-tabs/data-tab/DataTab"; import RFVDemoCustomTab from "./custom-tabs/reguar-fv-demo-tab/DemoCustomTab"; import ODFVDemoCustomTab from "./custom-tabs/ondemand-fv-demo-tab/DemoCustomTab"; import FSDemoCustomTab from "./custom-tabs/feature-service-demo-tab/DemoCustomTab"; @@ -32,6 +33,11 @@ const tabsRegistry = { path: "demo-tab", // Subpath for the tab Component: RFVDemoCustomTab, }, + { + label: "Data Tab Demo", // Navigation Label for the tab + path: "data-tab", // Subpath for the tab + Component: DataTab, + }, ], OnDemandFeatureViewCustomTabs: [ { diff --git a/ui/src/queries/useLoadFeatureViewSummaryStatistics.ts b/ui/src/queries/useLoadFeatureViewSummaryStatistics.ts index 0604029866..fea0bd9d81 100644 --- a/ui/src/queries/useLoadFeatureViewSummaryStatistics.ts +++ b/ui/src/queries/useLoadFeatureViewSummaryStatistics.ts @@ -9,7 +9,7 @@ const useLoadFeatureViewSummaryStatistics = (featureViewName: string) => { const { projectName } = useParams(); const queryKey = `featureViewSummaryStatistics:${featureViewName}`; - const url = `/metadata/${projectName}/featureView/${featureViewName}.json`; + const url = `/data/${projectName}/featureView/${featureViewName}.json`; return useQuery( queryKey, From eaf40220cb8d377ce2866be9197fa2af21f100ec Mon Sep 17 00:00:00 2001 From: Prasad Zende <37083921+prasadzende@users.noreply.github.com> Date: Wed, 6 Jul 2022 21:18:12 +0530 Subject: [PATCH 14/73] docs: Grammatically updated the quickstart guide docs (#2913) Signed-off-by: Prasad --- docs/getting-started/quickstart.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md index b5fe7bad4b..972ffa13a9 100644 --- a/docs/getting-started/quickstart.md +++ b/docs/getting-started/quickstart.md @@ -224,7 +224,7 @@ To train a model, we need features and labels. 
Often, this label data is stored The user can query that table of labels with timestamps and pass that into Feast as an _entity dataframe_ for training data generation. In many cases, Feast will also intelligently join relevant tables to create the relevant feature vectors. -* Note that we include timestamps because want the features for the same driver at various timestamps to be used in a model. +* Note that we include timestamps because we want the features for the same driver at various timestamps to be used in a model. {% tabs %} {% tab title="Python" %} From 38b28ca0181610c65d966a2f09456dbb102fbced Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Wed, 6 Jul 2022 10:39:17 -0700 Subject: [PATCH 15/73] feat: Add interfaces for batch materialization engine (#2901) * feat: Add scaffolding for batch materialization engine Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * a little better Signed-off-by: Achal Shah * a little better Signed-off-by: Achal Shah * docs Signed-off-by: Achal Shah * more api updates' Signed-off-by: Achal Shah * fix typos Signed-off-by: Achal Shah * make engine importable Signed-off-by: Achal Shah * style stuff Signed-off-by: Achal Shah * style stuff Signed-off-by: Achal Shah --- .../feast/infra/materialization/__init__.py | 13 ++ .../batch_materialization_engine.py | 122 ++++++++++ .../infra/materialization/local_engine.py | 185 +++++++++++++++ sdk/python/feast/infra/offline_stores/file.py | 8 +- .../infra/offline_stores/offline_utils.py | 3 +- .../feast/infra/passthrough_provider.py | 138 ++++++----- sdk/python/feast/infra/provider.py | 202 +---------------- sdk/python/feast/repo_config.py | 10 + sdk/python/feast/utils.py | 214 ++++++++++++++++++ sdk/python/tests/unit/infra/test_provider.py | 2 +- .../tests/utils/online_write_benchmark.py | 2 +- 11 files changed, 635 insertions(+), 264 deletions(-) create mode 100644 sdk/python/feast/infra/materialization/__init__.py create mode 100644 
sdk/python/feast/infra/materialization/batch_materialization_engine.py create mode 100644 sdk/python/feast/infra/materialization/local_engine.py diff --git a/sdk/python/feast/infra/materialization/__init__.py b/sdk/python/feast/infra/materialization/__init__.py new file mode 100644 index 0000000000..6be653b26e --- /dev/null +++ b/sdk/python/feast/infra/materialization/__init__.py @@ -0,0 +1,13 @@ +from .batch_materialization_engine import ( + BatchMaterializationEngine, + MaterializationJob, + MaterializationTask, +) +from .local_engine import LocalMaterializationEngine + +__all__ = [ + "MaterializationJob", + "MaterializationTask", + "BatchMaterializationEngine", + "LocalMaterializationEngine", +] diff --git a/sdk/python/feast/infra/materialization/batch_materialization_engine.py b/sdk/python/feast/infra/materialization/batch_materialization_engine.py new file mode 100644 index 0000000000..773c685d6e --- /dev/null +++ b/sdk/python/feast/infra/materialization/batch_materialization_engine.py @@ -0,0 +1,122 @@ +import enum +from abc import ABC, abstractmethod +from dataclasses import dataclass +from datetime import datetime +from typing import Callable, List, Optional, Sequence, Union + +from tqdm import tqdm + +from feast.batch_feature_view import BatchFeatureView +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.infra.offline_stores.offline_store import OfflineStore +from feast.infra.online_stores.online_store import OnlineStore +from feast.registry import BaseRegistry +from feast.repo_config import RepoConfig +from feast.stream_feature_view import StreamFeatureView + + +@dataclass +class MaterializationTask: + """ + A MaterializationTask represents a unit of data that needs to be materialized from an + offline store to an online store. 
+ """ + + project: str + feature_view: Union[BatchFeatureView, StreamFeatureView, FeatureView] + start_time: datetime + end_time: datetime + tqdm_builder: Callable[[int], tqdm] + + +class MaterializationJobStatus(enum.Enum): + WAITING = 1 + RUNNING = 2 + AVAILABLE = 3 + ERROR = 4 + CANCELLING = 5 + CANCELLED = 6 + SUCCEEDED = 7 + + +class MaterializationJob(ABC): + """ + MaterializationJob represents an ongoing or executed process that materializes data as per the + definition of a materialization task. + """ + + task: MaterializationTask + + @abstractmethod + def status(self) -> MaterializationJobStatus: + ... + + @abstractmethod + def error(self) -> Optional[BaseException]: + ... + + @abstractmethod + def should_be_retried(self) -> bool: + ... + + @abstractmethod + def job_id(self) -> str: + ... + + @abstractmethod + def url(self) -> Optional[str]: + ... + + +class BatchMaterializationEngine(ABC): + def __init__( + self, + *, + repo_config: RepoConfig, + offline_store: OfflineStore, + online_store: OnlineStore, + **kwargs, + ): + self.repo_config = repo_config + self.offline_store = offline_store + self.online_store = online_store + + @abstractmethod + def update( + self, + project: str, + views_to_delete: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + views_to_keep: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + ): + """This method ensures that any necessary infrastructure or resources needed by the + engine are set up ahead of materialization.""" + + @abstractmethod + def materialize( + self, registry: BaseRegistry, tasks: List[MaterializationTask] + ) -> List[MaterializationJob]: + """ + Materialize data from the offline store to the online store for this feature repo. + Args: + registry: The feast registry containing the applied feature views. + tasks: A list of individual materialization tasks. 
+ Returns: + A list of materialization jobs representing each task. + """ + ... + + @abstractmethod + def teardown_infra( + self, + project: str, + fvs: Sequence[Union[BatchFeatureView, StreamFeatureView, FeatureView]], + entities: Sequence[Entity], + ): + """This method ensures that any infrastructure or resources set up by ``update()``are torn down.""" diff --git a/sdk/python/feast/infra/materialization/local_engine.py b/sdk/python/feast/infra/materialization/local_engine.py new file mode 100644 index 0000000000..4f775981ef --- /dev/null +++ b/sdk/python/feast/infra/materialization/local_engine.py @@ -0,0 +1,185 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Callable, List, Literal, Optional, Sequence, Union + +from tqdm import tqdm + +from feast.batch_feature_view import BatchFeatureView +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.infra.offline_stores.offline_store import OfflineStore +from feast.infra.online_stores.online_store import OnlineStore +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.stream_feature_view import StreamFeatureView + +from ...registry import BaseRegistry +from ...utils import ( + _convert_arrow_to_proto, + _get_column_names, + _run_pyarrow_field_mapping, +) +from .batch_materialization_engine import ( + BatchMaterializationEngine, + MaterializationJob, + MaterializationJobStatus, + MaterializationTask, +) + +DEFAULT_BATCH_SIZE = 10_000 + + +class LocalMaterializationEngineConfig(FeastConfigBaseModel): + """Batch Materialization Engine config for local in-process engine""" + + type: Literal["local"] = "local" + """ Type selector""" + + +@dataclass +class LocalMaterializationJob(MaterializationJob): + def __init__( + self, + job_id: str, + status: MaterializationJobStatus, + error: Optional[BaseException] = None, + ) -> None: + super().__init__() + self._job_id: str = job_id + self._status: MaterializationJobStatus = status + 
self._error: Optional[BaseException] = error + + def status(self) -> MaterializationJobStatus: + return self._status + + def error(self) -> Optional[BaseException]: + return self._error + + def should_be_retried(self) -> bool: + return False + + def job_id(self) -> str: + return self._job_id + + def url(self) -> Optional[str]: + return None + + +class LocalMaterializationEngine(BatchMaterializationEngine): + def update( + self, + project: str, + views_to_delete: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + views_to_keep: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + ): + # Nothing to set up. + pass + + def teardown_infra( + self, + project: str, + fvs: Sequence[Union[BatchFeatureView, StreamFeatureView, FeatureView]], + entities: Sequence[Entity], + ): + # Nothing to tear down. + pass + + def __init__( + self, + *, + repo_config: RepoConfig, + offline_store: OfflineStore, + online_store: OnlineStore, + **kwargs, + ): + super().__init__( + repo_config=repo_config, + offline_store=offline_store, + online_store=online_store, + **kwargs, + ) + + def materialize( + self, registry, tasks: List[MaterializationTask] + ) -> List[MaterializationJob]: + return [ + self._materialize_one( + registry, + task.feature_view, + task.start_time, + task.end_time, + task.project, + task.tqdm_builder, + ) + for task in tasks + ] + + def _materialize_one( + self, + registry: BaseRegistry, + feature_view: Union[BatchFeatureView, StreamFeatureView, FeatureView], + start_date: datetime, + end_date: datetime, + project: str, + tqdm_builder: Callable[[int], tqdm], + ): + entities = [] + for entity_name in feature_view.entities: + entities.append(registry.get_entity(entity_name, project)) + + ( + join_key_columns, + feature_name_columns, + timestamp_field, + created_timestamp_column, + ) = _get_column_names(feature_view, entities) + + job_id = 
f"{feature_view.name}-{start_date}-{end_date}" + + try: + offline_job = self.offline_store.pull_latest_from_table_or_query( + config=self.repo_config, + data_source=feature_view.batch_source, + join_key_columns=join_key_columns, + feature_name_columns=feature_name_columns, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + start_date=start_date, + end_date=end_date, + ) + + table = offline_job.to_arrow() + + if feature_view.batch_source.field_mapping is not None: + table = _run_pyarrow_field_mapping( + table, feature_view.batch_source.field_mapping + ) + + join_key_to_value_type = { + entity.name: entity.dtype.to_value_type() + for entity in feature_view.entity_columns + } + + with tqdm_builder(table.num_rows) as pbar: + for batch in table.to_batches(DEFAULT_BATCH_SIZE): + rows_to_write = _convert_arrow_to_proto( + batch, feature_view, join_key_to_value_type + ) + self.online_store.online_write_batch( + self.repo_config, + feature_view, + rows_to_write, + lambda x: pbar.update(x), + ) + return LocalMaterializationJob( + job_id=job_id, status=MaterializationJobStatus.SUCCEEDED + ) + except BaseException as e: + return LocalMaterializationJob( + job_id=job_id, status=MaterializationJobStatus.ERROR, error=e + ) diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index 10012c2d80..d60d468174 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -29,14 +29,14 @@ DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, get_pyarrow_schema_from_batch_source, ) -from feast.infra.provider import ( - _get_requested_feature_views_to_features_dict, - _run_dask_field_mapping, -) from feast.registry import BaseRegistry from feast.repo_config import FeastConfigBaseModel, RepoConfig from feast.saved_dataset import SavedDatasetStorage from feast.usage import log_exceptions_and_usage +from feast.utils import ( + 
_get_requested_feature_views_to_features_dict, + _run_dask_field_mapping, +) class FileOfflineStoreConfig(FeastConfigBaseModel): diff --git a/sdk/python/feast/infra/offline_stores/offline_utils.py b/sdk/python/feast/infra/offline_stores/offline_utils.py index abe8d4e4e5..8b963a864b 100644 --- a/sdk/python/feast/infra/offline_stores/offline_utils.py +++ b/sdk/python/feast/infra/offline_stores/offline_utils.py @@ -17,11 +17,10 @@ from feast.feature_view import FeatureView from feast.importer import import_class from feast.infra.offline_stores.offline_store import OfflineStore -from feast.infra.provider import _get_requested_feature_views_to_features_dict from feast.registry import BaseRegistry from feast.repo_config import RepoConfig from feast.type_map import feast_value_type_to_pa -from feast.utils import to_naive_utc +from feast.utils import _get_requested_feature_views_to_features_dict, to_naive_utc DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL = "event_timestamp" diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 8c6dd831dd..181d46a5a8 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -2,33 +2,42 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import pandas as pd -import pyarrow import pyarrow as pa from tqdm import tqdm -from feast import FeatureService +from feast import importer +from feast.batch_feature_view import BatchFeatureView from feast.entity import Entity from feast.feature_logging import FeatureServiceLoggingSource +from feast.feature_service import FeatureService from feast.feature_view import FeatureView +from feast.infra.materialization import BatchMaterializationEngine, MaterializationTask +from feast.infra.materialization.batch_materialization_engine import ( + MaterializationJobStatus, +) from feast.infra.offline_stores.offline_store import RetrievalJob from 
feast.infra.offline_stores.offline_utils import get_offline_store_from_config from feast.infra.online_stores.helpers import get_online_store_from_config -from feast.infra.provider import ( - Provider, - _convert_arrow_to_proto, - _get_column_names, - _run_field_mapping, -) +from feast.infra.provider import Provider from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.registry import BaseRegistry from feast.repo_config import RepoConfig from feast.saved_dataset import SavedDataset +from feast.stream_feature_view import StreamFeatureView from feast.usage import RatioSampler, log_exceptions_and_usage, set_usage_attribute -from feast.utils import make_tzaware +from feast.utils import ( + _convert_arrow_to_proto, + _run_pyarrow_field_mapping, + make_tzaware, +) DEFAULT_BATCH_SIZE = 10_000 +BATCH_ENGINE_CLASS_FOR_TYPE = { + "local": "feast.infra.materialization.LocalMaterializationEngine", +} + class PassthroughProvider(Provider): """ @@ -41,6 +50,7 @@ def __init__(self, config: RepoConfig): self.repo_config = config self._offline_store = None self._online_store = None + self._batch_engine: Optional[BatchMaterializationEngine] = None @property def online_store(self): @@ -58,6 +68,46 @@ def offline_store(self): ) return self._offline_store + @property + def batch_engine(self) -> BatchMaterializationEngine: + if self._batch_engine: + return self._batch_engine + else: + engine_config = self.repo_config.batch_engine_config + config_is_dict = False + if isinstance(engine_config, str): + engine_config_type = engine_config + elif isinstance(engine_config, Dict): + if "type" not in engine_config: + raise ValueError("engine_config needs to have a `type` specified.") + engine_config_type = engine_config["type"] + config_is_dict = True + else: + raise RuntimeError( + f"Invalid config type specified for batch_engine: {type(engine_config)}" + ) + + if engine_config_type in 
BATCH_ENGINE_CLASS_FOR_TYPE: + engine_config_type = BATCH_ENGINE_CLASS_FOR_TYPE[engine_config_type] + engine_module, engine_class_name = engine_config_type.rsplit(".", 1) + engine_class = importer.import_class(engine_module, engine_class_name) + + if config_is_dict: + _batch_engine = engine_class( + repo_config=self.repo_config, + offline_store=self.offline_store, + online_store=self.online_store, + **engine_config, + ) + else: + _batch_engine = engine_class( + repo_config=self.repo_config, + offline_store=self.offline_store, + online_store=self.online_store, + ) + self._batch_engine = _batch_engine + return _batch_engine + def update_infra( self, project: str, @@ -137,7 +187,9 @@ def ingest_df( table = pa.Table.from_pandas(df) if feature_view.batch_source.field_mapping is not None: - table = _run_field_mapping(table, feature_view.batch_source.field_mapping) + table = _run_pyarrow_field_mapping( + table, feature_view.batch_source.field_mapping + ) join_keys = {entity.join_key: entity.value_type for entity in entities} rows_to_write = _convert_arrow_to_proto(table, feature_view, join_keys) @@ -150,7 +202,9 @@ def ingest_df_to_offline_store(self, feature_view: FeatureView, table: pa.Table) set_usage_attribute("provider", self.__class__.__name__) if feature_view.batch_source.field_mapping is not None: - table = _run_field_mapping(table, feature_view.batch_source.field_mapping) + table = _run_pyarrow_field_mapping( + table, feature_view.batch_source.field_mapping + ) self.offline_write_batch(self.repo_config, feature_view, table, None) @@ -165,50 +219,24 @@ def materialize_single_feature_view( tqdm_builder: Callable[[int], tqdm], ) -> None: set_usage_attribute("provider", self.__class__.__name__) - - entities = [] - for entity_name in feature_view.entities: - entities.append(registry.get_entity(entity_name, project)) - - ( - join_key_columns, - feature_name_columns, - timestamp_field, - created_timestamp_column, - ) = _get_column_names(feature_view, entities) - - 
offline_job = self.offline_store.pull_latest_from_table_or_query( - config=config, - data_source=feature_view.batch_source, - join_key_columns=join_key_columns, - feature_name_columns=feature_name_columns, - timestamp_field=timestamp_field, - created_timestamp_column=created_timestamp_column, - start_date=start_date, - end_date=end_date, + assert ( + isinstance(feature_view, BatchFeatureView) + or isinstance(feature_view, StreamFeatureView) + or isinstance(feature_view, FeatureView) + ), f"Unexpected type for {feature_view.name}: {type(feature_view)}" + task = MaterializationTask( + project=project, + feature_view=feature_view, + start_time=start_date, + end_time=end_date, + tqdm_builder=tqdm_builder, ) - - table = offline_job.to_arrow() - - if feature_view.batch_source.field_mapping is not None: - table = _run_field_mapping(table, feature_view.batch_source.field_mapping) - - join_key_to_value_type = { - entity.name: entity.dtype.to_value_type() - for entity in feature_view.entity_columns - } - - with tqdm_builder(table.num_rows) as pbar: - for batch in table.to_batches(DEFAULT_BATCH_SIZE): - rows_to_write = _convert_arrow_to_proto( - batch, feature_view, join_key_to_value_type - ) - self.online_write_batch( - self.repo_config, - feature_view, - rows_to_write, - lambda x: pbar.update(x), - ) + jobs = self.batch_engine.materialize(registry, [task]) + assert len(jobs) == 1 + if jobs[0].status() == MaterializationJobStatus.ERROR and jobs[0].error(): + e = jobs[0].error() + assert e + raise e def get_historical_features( self, @@ -260,7 +288,7 @@ def retrieve_saved_dataset( def write_feature_service_logs( self, feature_service: FeatureService, - logs: Union[pyarrow.Table, str], + logs: Union[pa.Table, str], config: RepoConfig, registry: BaseRegistry, ): diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index d2e37e69db..9695e4d736 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -1,29 +1,24 
@@ import abc -from collections import defaultdict from datetime import datetime from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import dask.dataframe as dd import pandas as pd import pyarrow from tqdm import tqdm from feast import FeatureService, errors from feast.entity import Entity -from feast.feature_view import DUMMY_ENTITY_ID, FeatureView +from feast.feature_view import FeatureView from feast.importer import import_class from feast.infra.infra_object import Infra from feast.infra.offline_stores.offline_store import RetrievalJob -from feast.on_demand_feature_view import OnDemandFeatureView from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.registry import BaseRegistry from feast.repo_config import RepoConfig from feast.saved_dataset import SavedDataset -from feast.type_map import python_values_to_proto_values -from feast.value_type import ValueType PROVIDERS_CLASS_FOR_TYPE = { "gcp": "feast.infra.gcp.GcpProvider", @@ -252,198 +247,3 @@ def get_provider(config: RepoConfig, repo_path: Path) -> Provider: cls = import_class(module_name, class_name, "Provider") return cls(config) - - -def _get_requested_feature_views_to_features_dict( - feature_refs: List[str], - feature_views: List[FeatureView], - on_demand_feature_views: List[OnDemandFeatureView], -) -> Tuple[Dict[FeatureView, List[str]], Dict[OnDemandFeatureView, List[str]]]: - """Create a dict of FeatureView -> List[Feature] for all requested features. 
- Set full_feature_names to True to have feature names prefixed by their feature view name.""" - - feature_views_to_feature_map: Dict[FeatureView, List[str]] = defaultdict(list) - on_demand_feature_views_to_feature_map: Dict[ - OnDemandFeatureView, List[str] - ] = defaultdict(list) - - for ref in feature_refs: - ref_parts = ref.split(":") - feature_view_from_ref = ref_parts[0] - feature_from_ref = ref_parts[1] - - found = False - for fv in feature_views: - if fv.projection.name_to_use() == feature_view_from_ref: - found = True - feature_views_to_feature_map[fv].append(feature_from_ref) - for odfv in on_demand_feature_views: - if odfv.projection.name_to_use() == feature_view_from_ref: - found = True - on_demand_feature_views_to_feature_map[odfv].append(feature_from_ref) - - if not found: - raise ValueError(f"Could not find feature view from reference {ref}") - - return feature_views_to_feature_map, on_demand_feature_views_to_feature_map - - -def _get_column_names( - feature_view: FeatureView, entities: List[Entity] -) -> Tuple[List[str], List[str], str, Optional[str]]: - """ - If a field mapping exists, run it in reverse on the join keys, - feature names, event timestamp column, and created timestamp column - to get the names of the relevant columns in the offline feature store table. - - Returns: - Tuple containing the list of reverse-mapped join_keys, - reverse-mapped feature names, reverse-mapped event timestamp column, - and reverse-mapped created timestamp column that will be passed into - the query to the offline store. 
- """ - # if we have mapped fields, use the original field names in the call to the offline store - timestamp_field = feature_view.batch_source.timestamp_field - feature_names = [feature.name for feature in feature_view.features] - created_timestamp_column = feature_view.batch_source.created_timestamp_column - join_keys = [ - entity.join_key for entity in entities if entity.join_key != DUMMY_ENTITY_ID - ] - if feature_view.batch_source.field_mapping is not None: - reverse_field_mapping = { - v: k for k, v in feature_view.batch_source.field_mapping.items() - } - timestamp_field = ( - reverse_field_mapping[timestamp_field] - if timestamp_field in reverse_field_mapping.keys() - else timestamp_field - ) - created_timestamp_column = ( - reverse_field_mapping[created_timestamp_column] - if created_timestamp_column - and created_timestamp_column in reverse_field_mapping.keys() - else created_timestamp_column - ) - join_keys = [ - reverse_field_mapping[col] if col in reverse_field_mapping.keys() else col - for col in join_keys - ] - feature_names = [ - reverse_field_mapping[col] if col in reverse_field_mapping.keys() else col - for col in feature_names - ] - - # We need to exclude join keys and timestamp columns from the list of features, after they are mapped to - # their final column names via the `field_mapping` field of the source. 
- feature_names = [ - name - for name in feature_names - if name not in join_keys - and name != timestamp_field - and name != created_timestamp_column - ] - return ( - join_keys, - feature_names, - timestamp_field, - created_timestamp_column, - ) - - -def _run_field_mapping( - table: pyarrow.Table, field_mapping: Dict[str, str], -) -> pyarrow.Table: - # run field mapping in the forward direction - cols = table.column_names - mapped_cols = [ - field_mapping[col] if col in field_mapping.keys() else col for col in cols - ] - table = table.rename_columns(mapped_cols) - return table - - -def _run_dask_field_mapping( - table: dd.DataFrame, field_mapping: Dict[str, str], -): - if field_mapping: - # run field mapping in the forward direction - table = table.rename(columns=field_mapping) - table = table.persist() - - return table - - -def _coerce_datetime(ts): - """ - Depending on underlying time resolution, arrow to_pydict() sometimes returns pd - timestamp type (for nanosecond resolution), and sometimes you get standard python datetime - (for microsecond resolution). - While pd timestamp class is a subclass of python datetime, it doesn't always behave the - same way. We convert it to normal datetime so that consumers downstream don't have to deal - with these quirks. - """ - if isinstance(ts, pd.Timestamp): - return ts.to_pydatetime() - else: - return ts - - -def _convert_arrow_to_proto( - table: Union[pyarrow.Table, pyarrow.RecordBatch], - feature_view: FeatureView, - join_keys: Dict[str, ValueType], -) -> List[Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]]: - # Avoid ChunkedArrays which guarentees `zero_copy_only` availiable. 
- if isinstance(table, pyarrow.Table): - table = table.to_batches()[0] - - columns = [ - (field.name, field.dtype.to_value_type()) for field in feature_view.features - ] + list(join_keys.items()) - - proto_values_by_column = { - column: python_values_to_proto_values( - table.column(column).to_numpy(zero_copy_only=False), value_type - ) - for column, value_type in columns - } - - entity_keys = [ - EntityKeyProto( - join_keys=join_keys, - entity_values=[proto_values_by_column[k][idx] for k in join_keys], - ) - for idx in range(table.num_rows) - ] - - # Serialize the features per row - feature_dict = { - feature.name: proto_values_by_column[feature.name] - for feature in feature_view.features - } - features = [dict(zip(feature_dict, vars)) for vars in zip(*feature_dict.values())] - - # Convert event_timestamps - event_timestamps = [ - _coerce_datetime(val) - for val in pd.to_datetime( - table.column(feature_view.batch_source.timestamp_field).to_numpy( - zero_copy_only=False - ) - ) - ] - - # Convert created_timestamps if they exist - if feature_view.batch_source.created_timestamp_column: - created_timestamps = [ - _coerce_datetime(val) - for val in pd.to_datetime( - table.column( - feature_view.batch_source.created_timestamp_column - ).to_numpy(zero_copy_only=False) - ) - ] - else: - created_timestamps = [None] * table.num_rows - - return list(zip(entity_keys, features, event_timestamps, created_timestamps)) diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index b7cf1683dc..f315023ee1 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -120,6 +120,9 @@ class RepoConfig(FeastBaseModel): _offline_config: Any = Field(alias="offline_store") """ OfflineStoreConfig: Offline store configuration (optional depending on provider) """ + batch_engine_config: Any = Field(alias="batch_engine") + """ BatchMaterializationEngine: Batch materialization configuration (optional depending on provider)""" + feature_server: 
Optional[Any] """ FeatureServerConfig: Feature server configuration (optional depending on provider) """ @@ -155,6 +158,13 @@ def __init__(self, **data: Any): elif data["provider"] == "aws": self._online_config = "dynamodb" + self._batch_engine = None + if "batch_engine" in data: + self.batch_engine_config = data["batch_engine"] + else: + # Defaults to using local in-process materialization engine. + self.batch_engine_config = "local" + if isinstance(self.feature_server, Dict): self.feature_server = get_feature_server_config_from_type( self.feature_server["type"] diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index a40f423c53..9f18da38cd 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -1,8 +1,24 @@ +import typing +from collections import defaultdict from datetime import datetime +from typing import Dict, List, Optional, Tuple, Union +import pandas as pd +import pyarrow +from dask import dataframe as dd from dateutil.tz import tzlocal from pytz import utc +from feast.entity import Entity +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.type_map import python_values_to_proto_values +from feast.value_type import ValueType + +if typing.TYPE_CHECKING: + from feast.feature_view import FeatureView + from feast.on_demand_feature_view import OnDemandFeatureView + def make_tzaware(t: datetime) -> datetime: """We assume tz-naive datetimes are UTC""" @@ -24,3 +40,201 @@ def maybe_local_tz(t: datetime) -> datetime: return t.replace(tzinfo=tzlocal()) else: return t + + +def _get_requested_feature_views_to_features_dict( + feature_refs: List[str], + feature_views: List["FeatureView"], + on_demand_feature_views: List["OnDemandFeatureView"], +) -> Tuple[Dict["FeatureView", List[str]], Dict["OnDemandFeatureView", List[str]]]: + """Create a dict of FeatureView -> List[Feature] for all requested features. 
+ Set full_feature_names to True to have feature names prefixed by their feature view name.""" + + feature_views_to_feature_map: Dict["FeatureView", List[str]] = defaultdict(list) + on_demand_feature_views_to_feature_map: Dict[ + "OnDemandFeatureView", List[str] + ] = defaultdict(list) + + for ref in feature_refs: + ref_parts = ref.split(":") + feature_view_from_ref = ref_parts[0] + feature_from_ref = ref_parts[1] + + found = False + for fv in feature_views: + if fv.projection.name_to_use() == feature_view_from_ref: + found = True + feature_views_to_feature_map[fv].append(feature_from_ref) + for odfv in on_demand_feature_views: + if odfv.projection.name_to_use() == feature_view_from_ref: + found = True + on_demand_feature_views_to_feature_map[odfv].append(feature_from_ref) + + if not found: + raise ValueError(f"Could not find feature view from reference {ref}") + + return feature_views_to_feature_map, on_demand_feature_views_to_feature_map + + +def _get_column_names( + feature_view: "FeatureView", entities: List[Entity] +) -> Tuple[List[str], List[str], str, Optional[str]]: + """ + If a field mapping exists, run it in reverse on the join keys, + feature names, event timestamp column, and created timestamp column + to get the names of the relevant columns in the offline feature store table. + + Returns: + Tuple containing the list of reverse-mapped join_keys, + reverse-mapped feature names, reverse-mapped event timestamp column, + and reverse-mapped created timestamp column that will be passed into + the query to the offline store. 
+ """ + # if we have mapped fields, use the original field names in the call to the offline store + timestamp_field = feature_view.batch_source.timestamp_field + feature_names = [feature.name for feature in feature_view.features] + created_timestamp_column = feature_view.batch_source.created_timestamp_column + + from feast.feature_view import DUMMY_ENTITY_ID + + join_keys = [ + entity.join_key for entity in entities if entity.join_key != DUMMY_ENTITY_ID + ] + if feature_view.batch_source.field_mapping is not None: + reverse_field_mapping = { + v: k for k, v in feature_view.batch_source.field_mapping.items() + } + timestamp_field = ( + reverse_field_mapping[timestamp_field] + if timestamp_field in reverse_field_mapping.keys() + else timestamp_field + ) + created_timestamp_column = ( + reverse_field_mapping[created_timestamp_column] + if created_timestamp_column + and created_timestamp_column in reverse_field_mapping.keys() + else created_timestamp_column + ) + join_keys = [ + reverse_field_mapping[col] if col in reverse_field_mapping.keys() else col + for col in join_keys + ] + feature_names = [ + reverse_field_mapping[col] if col in reverse_field_mapping.keys() else col + for col in feature_names + ] + + # We need to exclude join keys and timestamp columns from the list of features, after they are mapped to + # their final column names via the `field_mapping` field of the source. 
+ feature_names = [ + name + for name in feature_names + if name not in join_keys + and name != timestamp_field + and name != created_timestamp_column + ] + return ( + join_keys, + feature_names, + timestamp_field, + created_timestamp_column, + ) + + +def _run_pyarrow_field_mapping( + table: pyarrow.Table, field_mapping: Dict[str, str], +) -> pyarrow.Table: + # run field mapping in the forward direction + cols = table.column_names + mapped_cols = [ + field_mapping[col] if col in field_mapping.keys() else col for col in cols + ] + table = table.rename_columns(mapped_cols) + return table + + +def _run_dask_field_mapping( + table: dd.DataFrame, field_mapping: Dict[str, str], +): + if field_mapping: + # run field mapping in the forward direction + table = table.rename(columns=field_mapping) + table = table.persist() + + return table + + +def _coerce_datetime(ts): + """ + Depending on underlying time resolution, arrow to_pydict() sometimes returns pd + timestamp type (for nanosecond resolution), and sometimes you get standard python datetime + (for microsecond resolution). + While pd timestamp class is a subclass of python datetime, it doesn't always behave the + same way. We convert it to normal datetime so that consumers downstream don't have to deal + with these quirks. + """ + if isinstance(ts, pd.Timestamp): + return ts.to_pydatetime() + else: + return ts + + +def _convert_arrow_to_proto( + table: Union[pyarrow.Table, pyarrow.RecordBatch], + feature_view: "FeatureView", + join_keys: Dict[str, ValueType], +) -> List[Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]]: + # Avoid ChunkedArrays which guarantees `zero_copy_only` available. 
+ if isinstance(table, pyarrow.Table): + table = table.to_batches()[0] + + columns = [ + (field.name, field.dtype.to_value_type()) for field in feature_view.features + ] + list(join_keys.items()) + + proto_values_by_column = { + column: python_values_to_proto_values( + table.column(column).to_numpy(zero_copy_only=False), value_type + ) + for column, value_type in columns + } + + entity_keys = [ + EntityKeyProto( + join_keys=join_keys, + entity_values=[proto_values_by_column[k][idx] for k in join_keys], + ) + for idx in range(table.num_rows) + ] + + # Serialize the features per row + feature_dict = { + feature.name: proto_values_by_column[feature.name] + for feature in feature_view.features + } + features = [dict(zip(feature_dict, vars)) for vars in zip(*feature_dict.values())] + + # Convert event_timestamps + event_timestamps = [ + _coerce_datetime(val) + for val in pd.to_datetime( + table.column(feature_view.batch_source.timestamp_field).to_numpy( + zero_copy_only=False + ) + ) + ] + + # Convert created_timestamps if they exist + if feature_view.batch_source.created_timestamp_column: + created_timestamps = [ + _coerce_datetime(val) + for val in pd.to_datetime( + table.column( + feature_view.batch_source.created_timestamp_column + ).to_numpy(zero_copy_only=False) + ) + ] + else: + created_timestamps = [None] * table.num_rows + + return list(zip(entity_keys, features, event_timestamps, created_timestamps)) diff --git a/sdk/python/tests/unit/infra/test_provider.py b/sdk/python/tests/unit/infra/test_provider.py index 5ed5603b03..217a1361b4 100644 --- a/sdk/python/tests/unit/infra/test_provider.py +++ b/sdk/python/tests/unit/infra/test_provider.py @@ -18,8 +18,8 @@ from feast.entity import Entity from feast.feature_view import FeatureView from feast.field import Field -from feast.infra.provider import _get_column_names from feast.types import String +from feast.utils import _get_column_names def test_get_column_names_preserves_feature_ordering(): diff --git 
a/sdk/python/tests/utils/online_write_benchmark.py b/sdk/python/tests/utils/online_write_benchmark.py index 9f2f8ba60d..8a138f41db 100644 --- a/sdk/python/tests/utils/online_write_benchmark.py +++ b/sdk/python/tests/utils/online_write_benchmark.py @@ -14,9 +14,9 @@ from feast.feature_store import FeatureStore from feast.feature_view import FeatureView from feast.field import Field -from feast.infra.provider import _convert_arrow_to_proto from feast.repo_config import RepoConfig from feast.types import Float32, Int32 +from feast.utils import _convert_arrow_to_proto def create_driver_hourly_stats_feature_view(source): From 38fd00195f8ed309b2e7bae06d48cb10ab82f5aa Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Wed, 6 Jul 2022 11:01:13 -0700 Subject: [PATCH 16/73] fix: Revert "feat: Add snowflake online store (#2902)" (#2909) This reverts commit f758f9e148212d08f63df155e864940c27d92155. Signed-off-by: Kevin Zhang --- .../feast/infra/online_stores/snowflake.py | 232 ------------------ .../feast/infra/utils/snowflake_utils.py | 181 +------------- .../feature_repos/repo_configuration.py | 12 - 3 files changed, 2 insertions(+), 423 deletions(-) delete mode 100644 sdk/python/feast/infra/online_stores/snowflake.py diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py deleted file mode 100644 index 80074cf509..0000000000 --- a/sdk/python/feast/infra/online_stores/snowflake.py +++ /dev/null @@ -1,232 +0,0 @@ -import itertools -import os -from binascii import hexlify -from datetime import datetime -from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple - -import pandas as pd -import pytz -from pydantic import Field -from pydantic.schema import Literal - -from feast import Entity, FeatureView -from feast.infra.key_encoding_utils import serialize_entity_key -from feast.infra.online_stores.online_store import OnlineStore -from feast.infra.utils.snowflake_utils import 
get_snowflake_conn, write_pandas_binary -from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import Value as ValueProto -from feast.repo_config import FeastConfigBaseModel, RepoConfig -from feast.usage import log_exceptions_and_usage - - -class SnowflakeOnlineStoreConfig(FeastConfigBaseModel): - """ Online store config for Snowflake """ - - type: Literal["snowflake.online"] = "snowflake.online" - """ Online store type selector""" - - config_path: Optional[str] = ( - Path(os.environ["HOME"]) / ".snowsql/config" - ).__str__() - """ Snowflake config path -- absolute path required (Can't use ~)""" - - account: Optional[str] = None - """ Snowflake deployment identifier -- drop .snowflakecomputing.com""" - - user: Optional[str] = None - """ Snowflake user name """ - - password: Optional[str] = None - """ Snowflake password """ - - role: Optional[str] = None - """ Snowflake role name""" - - warehouse: Optional[str] = None - """ Snowflake warehouse name """ - - database: Optional[str] = None - """ Snowflake database name """ - - schema_: Optional[str] = Field("PUBLIC", alias="schema") - """ Snowflake schema name """ - - class Config: - allow_population_by_field_name = True - - -class SnowflakeOnlineStore(OnlineStore): - @log_exceptions_and_usage(online_store="snowflake") - def online_write_batch( - self, - config: RepoConfig, - table: FeatureView, - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], - ) -> None: - assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) - - dfs = [None] * len(data) - for i, (entity_key, values, timestamp, created_ts) in enumerate(data): - - df = pd.DataFrame( - columns=[ - "entity_feature_key", - "entity_key", - "feature_name", - "value", - "event_ts", - "created_ts", - ], - index=range(0, len(values)), - ) - - timestamp = _to_naive_utc(timestamp) - if created_ts is not None: - 
created_ts = _to_naive_utc(created_ts) - - for j, (feature_name, val) in enumerate(values.items()): - df.loc[j, "entity_feature_key"] = serialize_entity_key( - entity_key - ) + bytes(feature_name, encoding="utf-8") - df.loc[j, "entity_key"] = serialize_entity_key(entity_key) - df.loc[j, "feature_name"] = feature_name - df.loc[j, "value"] = val.SerializeToString() - df.loc[j, "event_ts"] = timestamp - df.loc[j, "created_ts"] = created_ts - - dfs[i] = df - if progress: - progress(1) - - if dfs: - agg_df = pd.concat(dfs) - - with get_snowflake_conn(config.online_store, autocommit=False) as conn: - - write_pandas_binary(conn, agg_df, f"{config.project}_{table.name}") - - query = f""" - INSERT OVERWRITE INTO "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}" - SELECT - "entity_feature_key", - "entity_key", - "feature_name", - "value", - "event_ts", - "created_ts" - FROM - (SELECT - *, - ROW_NUMBER() OVER(PARTITION BY "entity_key","feature_name" ORDER BY "event_ts" DESC, "created_ts" DESC) AS "_feast_row" - FROM - "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}") - WHERE - "_feast_row" = 1; - """ - - conn.cursor().execute(query) - - return None - - @log_exceptions_and_usage(online_store="snowflake") - def online_read( - self, - config: RepoConfig, - table: FeatureView, - entity_keys: List[EntityKeyProto], - requested_features: List[str], - ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) - - result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] - - with get_snowflake_conn(config.online_store) as conn: - - df = ( - conn.cursor() - .execute( - f""" - SELECT - "entity_key", "feature_name", "value", "event_ts" - FROM - "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}" - WHERE - "entity_feature_key" IN 
({','.join([('TO_BINARY('+hexlify(serialize_entity_key(combo[0])+bytes(combo[1], encoding='utf-8')).__str__()[1:]+")") for combo in itertools.product(entity_keys,requested_features)])}) - """, - ) - .fetch_pandas_all() - ) - - for entity_key in entity_keys: - entity_key_bin = serialize_entity_key(entity_key) - res = {} - res_ts = None - for index, row in df[df["entity_key"] == entity_key_bin].iterrows(): - val = ValueProto() - val.ParseFromString(row["value"]) - res[row["feature_name"]] = val - res_ts = row["event_ts"].to_pydatetime() - - if not res: - result.append((None, None)) - else: - result.append((res_ts, res)) - return result - - @log_exceptions_and_usage(online_store="snowflake") - def update( - self, - config: RepoConfig, - tables_to_delete: Sequence[FeatureView], - tables_to_keep: Sequence[FeatureView], - entities_to_delete: Sequence[Entity], - entities_to_keep: Sequence[Entity], - partial: bool, - ): - assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) - - with get_snowflake_conn(config.online_store) as conn: - - for table in tables_to_keep: - - conn.cursor().execute( - f"""CREATE TABLE IF NOT EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}" ( - "entity_feature_key" BINARY, - "entity_key" BINARY, - "feature_name" VARCHAR, - "value" BINARY, - "event_ts" TIMESTAMP, - "created_ts" TIMESTAMP - )""" - ) - - for table in tables_to_delete: - - conn.cursor().execute( - f'DROP TABLE IF EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}"' - ) - - def teardown( - self, - config: RepoConfig, - tables: Sequence[FeatureView], - entities: Sequence[Entity], - ): - assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) - - with get_snowflake_conn(config.online_store) as conn: - - for table in tables: - query = f'DROP TABLE IF EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."{config.project}_{table.name}"' - 
conn.cursor().execute(query) - - -def _to_naive_utc(ts: datetime): - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/utils/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake_utils.py index d9be930439..05834ae436 100644 --- a/sdk/python/feast/infra/utils/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake_utils.py @@ -44,12 +44,8 @@ def execute_snowflake_statement(conn: SnowflakeConnection, query) -> SnowflakeCu def get_snowflake_conn(config, autocommit=True) -> SnowflakeConnection: - assert config.type in ["snowflake.offline", "snowflake.online"] - - if config.type == "snowflake.offline": - config_header = "connections.feast_offline_store" - elif config.type == "snowflake.online": - config_header = "connections.feast_online_store" + assert config.type == "snowflake.offline" + config_header = "connections.feast_offline_store" config_dict = dict(config) @@ -433,176 +429,3 @@ def parse_private_key_path(key_path: str, private_key_passphrase: str) -> bytes: ) return pkb - - -def write_pandas_binary( - conn: SnowflakeConnection, - df: pd.DataFrame, - table_name: str, - database: Optional[str] = None, - schema: Optional[str] = None, - chunk_size: Optional[int] = None, - compression: str = "gzip", - on_error: str = "abort_statement", - parallel: int = 4, - quote_identifiers: bool = True, - auto_create_table: bool = False, - create_temp_table: bool = False, -): - """Allows users to most efficiently write back a pandas DataFrame to Snowflake. - - It works by dumping the DataFrame into Parquet files, uploading them and finally copying their data into the table. - - Returns whether all files were ingested correctly, number of chunks uploaded, and number of rows ingested - with all of the COPY INTO command's output for debugging purposes. 
- - Example usage: - import pandas - from snowflake.connector.pandas_tools import write_pandas - - df = pandas.DataFrame([('Mark', 10), ('Luke', 20)], columns=['name', 'balance']) - success, nchunks, nrows, _ = write_pandas(cnx, df, 'customers') - - Args: - conn: Connection to be used to communicate with Snowflake. - df: Dataframe we'd like to write back. - table_name: Table name where we want to insert into. - database: Database schema and table is in, if not provided the default one will be used (Default value = None). - schema: Schema table is in, if not provided the default one will be used (Default value = None). - chunk_size: Number of elements to be inserted once, if not provided all elements will be dumped once - (Default value = None). - compression: The compression used on the Parquet files, can only be gzip, or snappy. Gzip gives supposedly a - better compression, while snappy is faster. Use whichever is more appropriate (Default value = 'gzip'). - on_error: Action to take when COPY INTO statements fail, default follows documentation at: - https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#copy-options-copyoptions - (Default value = 'abort_statement'). - parallel: Number of threads to be used when uploading chunks, default follows documentation at: - https://docs.snowflake.com/en/sql-reference/sql/put.html#optional-parameters (Default value = 4). - quote_identifiers: By default, identifiers, specifically database, schema, table and column names - (from df.columns) will be quoted. If set to False, identifiers are passed on to Snowflake without quoting. - I.e. identifiers will be coerced to uppercase by Snowflake. (Default value = True) - auto_create_table: When true, will automatically create a table with corresponding columns for each column in - the passed in DataFrame. 
The table will not be created if it already exists - create_temp_table: Will make the auto-created table as a temporary table - """ - if database is not None and schema is None: - raise ProgrammingError( - "Schema has to be provided to write_pandas when a database is provided" - ) - # This dictionary maps the compression algorithm to Snowflake put copy into command type - # https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#type-parquet - compression_map = {"gzip": "auto", "snappy": "snappy"} - if compression not in compression_map.keys(): - raise ProgrammingError( - "Invalid compression '{}', only acceptable values are: {}".format( - compression, compression_map.keys() - ) - ) - if quote_identifiers: - location = ( - (('"' + database + '".') if database else "") - + (('"' + schema + '".') if schema else "") - + ('"' + table_name + '"') - ) - else: - location = ( - (database + "." if database else "") - + (schema + "." if schema else "") - + (table_name) - ) - if chunk_size is None: - chunk_size = len(df) - cursor: SnowflakeCursor = conn.cursor() - stage_name = create_temporary_sfc_stage(cursor) - - with TemporaryDirectory() as tmp_folder: - for i, chunk in chunk_helper(df, chunk_size): - chunk_path = os.path.join(tmp_folder, "file{}.txt".format(i)) - # Dump chunk into parquet file - chunk.to_parquet( - chunk_path, - compression=compression, - use_deprecated_int96_timestamps=True, - ) - # Upload parquet file - upload_sql = ( - "PUT /* Python:snowflake.connector.pandas_tools.write_pandas() */ " - "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" - ).format( - path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"), - stage_name=stage_name, - parallel=parallel, - ) - logger.debug(f"uploading files with '{upload_sql}'") - cursor.execute(upload_sql, _is_internal=True) - # Remove chunk file - os.remove(chunk_path) - if quote_identifiers: - columns = '"' + '","'.join(list(df.columns)) + '"' - else: - columns = ",".join(list(df.columns)) - - if 
auto_create_table: - file_format_name = create_file_format(compression, compression_map, cursor) - infer_schema_sql = f"SELECT COLUMN_NAME, TYPE FROM table(infer_schema(location=>'@\"{stage_name}\"', file_format=>'{file_format_name}'))" - logger.debug(f"inferring schema with '{infer_schema_sql}'") - result_cursor = cursor.execute(infer_schema_sql, _is_internal=True) - if result_cursor is None: - raise SnowflakeQueryUnknownError(infer_schema_sql) - result = cast(List[Tuple[str, str]], result_cursor.fetchall()) - column_type_mapping: Dict[str, str] = dict(result) - # Infer schema can return the columns out of order depending on the chunking we do when uploading - # so we have to iterate through the dataframe columns to make sure we create the table with its - # columns in order - quote = '"' if quote_identifiers else "" - create_table_columns = ", ".join( - [f"{quote}{c}{quote} {column_type_mapping[c]}" for c in df.columns] - ) - create_table_sql = ( - f"CREATE {'TEMP ' if create_temp_table else ''}TABLE IF NOT EXISTS {location} " - f"({create_table_columns})" - f" /* Python:snowflake.connector.pandas_tools.write_pandas() */ " - ) - logger.debug(f"auto creating table with '{create_table_sql}'") - cursor.execute(create_table_sql, _is_internal=True) - drop_file_format_sql = f"DROP FILE FORMAT IF EXISTS {file_format_name}" - logger.debug(f"dropping file format with '{drop_file_format_sql}'") - cursor.execute(drop_file_format_sql, _is_internal=True) - - # in Snowflake, all parquet data is stored in a single column, $1, so we must select columns explicitly - # see (https://docs.snowflake.com/en/user-guide/script-data-load-transform-parquet.html) - if quote_identifiers: - parquet_columns = ",".join( - f'TO_BINARY($1:"{c}")' - if c in ["entity_feature_key", "entity_key", "value"] - else f'$1:"{c}"' - for c in df.columns - ) - else: - parquet_columns = ",".join( - f"TO_BINARY($1:{c})" - if c in ["entity_feature_key", "entity_key", "value"] - else f"$1:{c}" - for c in 
df.columns - ) - - copy_into_sql = ( - "COPY INTO {location} /* Python:snowflake.connector.pandas_tools.write_pandas() */ " - "({columns}) " - 'FROM (SELECT {parquet_columns} FROM @"{stage_name}") ' - "FILE_FORMAT=(TYPE=PARQUET COMPRESSION={compression} BINARY_AS_TEXT = FALSE) " - "PURGE=TRUE ON_ERROR={on_error}" - ).format( - location=location, - columns=columns, - parquet_columns=parquet_columns, - stage_name=stage_name, - compression=compression_map[compression], - on_error=on_error, - ) - logger.debug("copying into with '{}'".format(copy_into_sql)) - # Snowflake returns the original cursor if the query execution succeeded. - result_cursor = cursor.execute(copy_into_sql, _is_internal=True) - if result_cursor is None: - raise SnowflakeQueryUnknownError(copy_into_sql) - result_cursor.close() diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 67e585839d..a168f4f028 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -74,17 +74,6 @@ "connection_string": "127.0.0.1:6001,127.0.0.1:6002,127.0.0.1:6003", } -SNOWFLAKE_CONFIG = { - "type": "snowflake.online", - "account": os.environ["SNOWFLAKE_CI_DEPLOYMENT"], - "user": os.environ["SNOWFLAKE_CI_USER"], - "password": os.environ["SNOWFLAKE_CI_PASSWORD"], - "role": os.environ["SNOWFLAKE_CI_ROLE"], - "warehouse": os.environ["SNOWFLAKE_CI_WAREHOUSE"], - "database": "FEAST", - "schema": "ONLINE", -} - OFFLINE_STORE_TO_PROVIDER_CONFIG: Dict[str, DataSourceCreator] = { "file": ("local", FileDataSourceCreator), "bigquery": ("gcp", BigQueryDataSourceCreator), @@ -114,7 +103,6 @@ AVAILABLE_ONLINE_STORES["redis"] = (REDIS_CONFIG, None) AVAILABLE_ONLINE_STORES["dynamodb"] = (DYNAMO_CONFIG, None) AVAILABLE_ONLINE_STORES["datastore"] = ("datastore", None) - AVAILABLE_ONLINE_STORES["snowflake"] = (SNOWFLAKE_CONFIG, None) 
full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) From adf3212fd8d63a8cc5c3fa6377e361c5ec64e89d Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Wed, 6 Jul 2022 11:38:13 -0700 Subject: [PATCH 17/73] chore: Refactor StreamFeatureViewMeta to FeatureViewMeta and dedupe (#2915) * chore: Refactor StreamFeatureViewMeta to FeatureViewMeta and deduplicate code Signed-off-by: Achal Shah * ttl_duration Signed-off-by: Achal Shah * nits Signed-off-by: Achal Shah --- protos/feast/core/StreamFeatureView.proto | 12 +------- sdk/python/feast/feature_view.py | 37 ++++++++++++++--------- sdk/python/feast/stream_feature_view.py | 26 ++-------------- 3 files changed, 25 insertions(+), 50 deletions(-) diff --git a/protos/feast/core/StreamFeatureView.proto b/protos/feast/core/StreamFeatureView.proto index d217b86a3f..06e9ee0612 100644 --- a/protos/feast/core/StreamFeatureView.proto +++ b/protos/feast/core/StreamFeatureView.proto @@ -34,7 +34,7 @@ import "feast/core/Aggregation.proto"; message StreamFeatureView { // User-specified specifications of this feature view. StreamFeatureViewSpec spec = 1; - StreamFeatureViewMeta meta = 2; + FeatureViewMeta meta = 2; } // Next available id: 17 @@ -90,13 +90,3 @@ message StreamFeatureViewSpec { string timestamp_field = 16; } -message StreamFeatureViewMeta { - // Time where this Feature View is created - google.protobuf.Timestamp created_timestamp = 1; - - // Time where this Feature View is last updated - google.protobuf.Timestamp last_updated_timestamp = 2; - - // List of pairs (start_time, end_time) for which this feature view has been materialized. - repeated MaterializationInterval materialization_intervals = 3; -} diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index 348c3019c5..dd8cb4f0a6 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -407,21 +407,8 @@ def to_proto(self) -> FeatureViewProto: Returns: A FeatureViewProto protobuf. 
""" - meta = FeatureViewMetaProto(materialization_intervals=[]) - if self.created_timestamp: - meta.created_timestamp.FromDatetime(self.created_timestamp) - if self.last_updated_timestamp: - meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) - for interval in self.materialization_intervals: - interval_proto = MaterializationIntervalProto() - interval_proto.start_time.FromDatetime(interval[0]) - interval_proto.end_time.FromDatetime(interval[1]) - meta.materialization_intervals.append(interval_proto) - - ttl_duration = None - if self.ttl is not None: - ttl_duration = Duration() - ttl_duration.FromTimedelta(self.ttl) + meta = self.to_proto_meta() + ttl_duration = self.get_ttl_duration() batch_source_proto = self.batch_source.to_proto() batch_source_proto.data_source_class_type = f"{self.batch_source.__class__.__module__}.{self.batch_source.__class__.__name__}" @@ -447,6 +434,26 @@ def to_proto(self) -> FeatureViewProto: return FeatureViewProto(spec=spec, meta=meta) + def to_proto_meta(self): + meta = FeatureViewMetaProto(materialization_intervals=[]) + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) + for interval in self.materialization_intervals: + interval_proto = MaterializationIntervalProto() + interval_proto.start_time.FromDatetime(interval[0]) + interval_proto.end_time.FromDatetime(interval[1]) + meta.materialization_intervals.append(interval_proto) + return meta + + def get_ttl_duration(self): + ttl_duration = None + if self.ttl is not None: + ttl_duration = Duration() + ttl_duration.FromTimedelta(self.ttl) + return ttl_duration + @classmethod def from_proto(cls, feature_view_proto: FeatureViewProto): """ diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index 077d8ab89a..f19b1fcff7 100644 --- a/sdk/python/feast/stream_feature_view.py +++ 
b/sdk/python/feast/stream_feature_view.py @@ -6,7 +6,6 @@ from typing import Dict, List, Optional, Tuple, Union import dill -from google.protobuf.duration_pb2 import Duration from typeguard import typechecked from feast import utils @@ -16,18 +15,12 @@ from feast.feature_view import FeatureView from feast.field import Field from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto -from feast.protos.feast.core.FeatureView_pb2 import ( - MaterializationInterval as MaterializationIntervalProto, -) from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( UserDefinedFunction as UserDefinedFunctionProto, ) from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureView as StreamFeatureViewProto, ) -from feast.protos.feast.core.StreamFeatureView_pb2 import ( - StreamFeatureViewMeta as StreamFeatureViewMetaProto, -) from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureViewSpec as StreamFeatureViewSpecProto, ) @@ -170,23 +163,8 @@ def __hash__(self) -> int: return super().__hash__() def to_proto(self): - meta = StreamFeatureViewMetaProto(materialization_intervals=[]) - if self.created_timestamp: - meta.created_timestamp.FromDatetime(self.created_timestamp) - - if self.last_updated_timestamp: - meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) - - for interval in self.materialization_intervals: - interval_proto = MaterializationIntervalProto() - interval_proto.start_time.FromDatetime(interval[0]) - interval_proto.end_time.FromDatetime(interval[1]) - meta.materialization_intervals.append(interval_proto) - - ttl_duration = None - if self.ttl is not None: - ttl_duration = Duration() - ttl_duration.FromTimedelta(self.ttl) + meta = self.to_proto_meta() + ttl_duration = self.get_ttl_duration() batch_source_proto = None if self.batch_source: From dcd8ec9f02742d520bd1cca89d5437ea8e20d28d Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Wed, 6 Jul 2022 12:36:15 -0700 Subject: [PATCH 18/73] chore: Clean 
up push source tests (#2912) * Delete unnecessary unit test Signed-off-by: Felix Wang * Clean up python feature server test Signed-off-by: Felix Wang * Clean up push source offline retrieval test Signed-off-by: Felix Wang * Clean up push source online retrieval test Signed-off-by: Felix Wang * Clean up offline write tests Signed-off-by: Felix Wang * Add back reorder columns test for offline write Signed-off-by: Felix Wang * Rename create_dataset Signed-off-by: Felix Wang * Add SFV back into online retrieval test Signed-off-by: Felix Wang --- sdk/python/tests/conftest.py | 4 +- sdk/python/tests/data/data_creator.py | 2 +- .../e2e/test_python_feature_server.py | 42 +++---- .../offline_store/test_offline_write.py | 83 ++++++------- ...line_retrieval.py => test_push_offline.py} | 25 ++-- ...nline_retrieval.py => test_push_online.py} | 23 ++-- .../online_store/test_universal_online.py | 109 +++++------------- .../registration/test_universal_types.py | 6 +- .../tests/unit/test_registry_conflict.py | 23 ---- 9 files changed, 103 insertions(+), 214 deletions(-) rename sdk/python/tests/integration/offline_store/{test_push_offline_retrieval.py => test_push_offline.py} (65%) rename sdk/python/tests/integration/online_store/{test_push_online_retrieval.py => test_push_online.py} (55%) delete mode 100644 sdk/python/tests/unit/test_registry_conflict.py diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 5fe9b5b699..d77d7e82fd 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -26,7 +26,7 @@ from feast import FeatureStore from feast.wait import wait_retry_backoff -from tests.data.data_creator import create_dataset +from tests.data.data_creator import create_basic_driver_dataset from tests.integration.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) @@ -351,7 +351,7 @@ def universal_data_sources(environment) -> TestData: @pytest.fixture def e2e_data_sources(environment: Environment): - df = 
create_dataset() + df = create_basic_driver_dataset() data_source = environment.data_source_creator.create_data_source( df, environment.feature_store.project, field_mapping={"ts_1": "ts"}, ) diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py index 186c39b9ef..2155468445 100644 --- a/sdk/python/tests/data/data_creator.py +++ b/sdk/python/tests/data/data_creator.py @@ -7,7 +7,7 @@ from feast.types import FeastType, Float32, Int32, Int64, String -def create_dataset( +def create_basic_driver_dataset( entity_type: FeastType = Int32, feature_dtype: str = None, feature_is_list: bool = False, diff --git a/sdk/python/tests/integration/e2e/test_python_feature_server.py b/sdk/python/tests/integration/e2e/test_python_feature_server.py index 7195594d02..ac098d3f29 100644 --- a/sdk/python/tests/integration/e2e/test_python_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_python_feature_server.py @@ -7,13 +7,8 @@ from feast.feast_object import FeastObject from feast.feature_server import get_app -from tests.integration.feature_repos.integration_test_repo_config import ( - IntegrationTestRepoConfig, -) from tests.integration.feature_repos.repo_configuration import ( - construct_test_environment, construct_universal_feature_views, - construct_universal_test_data, ) from tests.integration.feature_repos.universal.entities import ( customer, @@ -63,16 +58,13 @@ def test_get_online_features(python_fs_client): @pytest.mark.integration @pytest.mark.universal_online_stores def test_push(python_fs_client): - # TODO(felixwang9817): Note that we choose an entity value of 102 here since it is not included - # in the existing range of entity values (1-49). This allows us to push data for this test - # without affecting other tests. This decision is tech debt, and should be resolved by finding a - # better way to isolate data sources across tests. 
+ initial_temp = get_temperatures(python_fs_client, location_ids=[1])[0] json_data = json.dumps( { "push_source_name": "location_stats_push_source", "df": { - "location_id": [102], - "temperature": [4], + "location_id": [1], + "temperature": [initial_temp * 100], "event_timestamp": [str(datetime.utcnow())], "created": [str(datetime.utcnow())], }, @@ -82,7 +74,7 @@ def test_push(python_fs_client): # Check new pushed temperature is fetched assert response.status_code == 200 - assert get_temperatures(python_fs_client, location_ids=[102]) == [4] + assert get_temperatures(python_fs_client, location_ids=[1]) == [initial_temp * 100] def get_temperatures(client, location_ids: List[int]): @@ -102,20 +94,14 @@ def get_temperatures(client, location_ids: List[int]): @pytest.fixture -def python_fs_client(request): - config = IntegrationTestRepoConfig() - environment = construct_test_environment(config, fixture_request=request) +def python_fs_client(environment, universal_data_sources, request): fs = environment.feature_store - try: - entities, datasets, data_sources = construct_universal_test_data(environment) - feature_views = construct_universal_feature_views(data_sources) - feast_objects: List[FeastObject] = [] - feast_objects.extend(feature_views.values()) - feast_objects.extend([driver(), customer(), location()]) - fs.apply(feast_objects) - fs.materialize(environment.start_date, environment.end_date) - client = TestClient(get_app(fs)) - yield client - finally: - fs.teardown() - environment.data_source_creator.teardown() + entities, datasets, data_sources = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + feast_objects: List[FeastObject] = [] + feast_objects.extend(feature_views.values()) + feast_objects.extend([driver(), customer(), location()]) + fs.apply(feast_objects) + fs.materialize(environment.start_date, environment.end_date) + client = TestClient(get_app(fs)) + yield client diff --git 
a/sdk/python/tests/integration/offline_store/test_offline_write.py b/sdk/python/tests/integration/offline_store/test_offline_write.py index 3335da0df7..964793ffaa 100644 --- a/sdk/python/tests/integration/offline_store/test_offline_write.py +++ b/sdk/python/tests/integration/offline_store/test_offline_write.py @@ -7,52 +7,54 @@ from feast import FeatureView, Field from feast.types import Float32, Int32 +from tests.integration.feature_repos.repo_configuration import ( + construct_universal_feature_views, +) from tests.integration.feature_repos.universal.entities import driver -# TODO(felixwang9817): Add a unit test that checks that write_to_offline_store can reorder columns. -# This should only happen after https://github.com/feast-dev/feast/issues/2797 is fixed. - @pytest.mark.integration @pytest.mark.universal_offline_stores -@pytest.mark.universal_online_stores(only=["sqlite"]) -def test_writing_incorrect_schema_fails(environment, universal_data_sources): - """Tests that writing a dataframe with an incorrect schema fails.""" +def test_reorder_columns(environment, universal_data_sources): + """Tests that a dataframe with columns in the wrong order is reordered.""" store = environment.feature_store _, _, data_sources = universal_data_sources - driver_entity = driver() - driver_stats = FeatureView( - name="driver_stats", - entities=[driver_entity], - schema=[ - Field(name="avg_daily_trips", dtype=Int32), - Field(name="conv_rate", dtype=Float32), - Field(name="acc_rate", dtype=Float32), - ], - source=data_sources.driver, - ) + feature_views = construct_universal_feature_views(data_sources) + driver_fv = feature_views.driver + store.apply([driver(), driver_fv]) now = datetime.utcnow() ts = pd.Timestamp(now).round("ms") - entity_df = pd.DataFrame.from_dict( - {"driver_id": [1001, 1002], "event_timestamp": [ts - timedelta(hours=3), ts]} + # This dataframe has columns in the wrong order. 
+ df_to_write = pd.DataFrame.from_dict( + { + "avg_daily_trips": [random.randint(0, 10), random.randint(0, 10)], + "created": [ts, ts], + "conv_rate": [random.random(), random.random()], + "event_timestamp": [ts, ts], + "acc_rate": [random.random(), random.random()], + "driver_id": [1001, 1001], + }, ) - store.apply([driver_entity, driver_stats]) - df = store.get_historical_features( - entity_df=entity_df, - features=[ - "driver_stats:conv_rate", - "driver_stats:acc_rate", - "driver_stats:avg_daily_trips", - ], - full_feature_names=False, - ).to_df() + store.write_to_offline_store( + driver_fv.name, df_to_write, allow_registry_cache=False + ) - assert df["conv_rate"].isnull().all() - assert df["acc_rate"].isnull().all() - assert df["avg_daily_trips"].isnull().all() + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +def test_writing_incorrect_schema_fails(environment, universal_data_sources): + """Tests that writing a dataframe with an incorrect schema fails.""" + store = environment.feature_store + _, _, data_sources = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + driver_fv = feature_views.driver + store.apply([driver(), driver_fv]) + + now = datetime.utcnow() + ts = pd.Timestamp(now).round("ms") expected_df = pd.DataFrame.from_dict( { @@ -65,13 +67,12 @@ def test_writing_incorrect_schema_fails(environment, universal_data_sources): ) with pytest.raises(ValueError): store.write_to_offline_store( - driver_stats.name, expected_df, allow_registry_cache=False + driver_fv.name, expected_df, allow_registry_cache=False ) @pytest.mark.integration @pytest.mark.universal_offline_stores -@pytest.mark.universal_online_stores(only=["sqlite"]) def test_writing_consecutively_to_offline_store(environment, universal_data_sources): store = environment.feature_store _, _, data_sources = universal_data_sources @@ -96,7 +97,7 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour entity_df = 
pd.DataFrame.from_dict( { "driver_id": [1001, 1001], - "event_timestamp": [ts - timedelta(hours=4), ts - timedelta(hours=3)], + "event_timestamp": [ts + timedelta(hours=3), ts + timedelta(hours=4)], } ) @@ -117,7 +118,7 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour first_df = pd.DataFrame.from_dict( { - "event_timestamp": [ts - timedelta(hours=4), ts - timedelta(hours=3)], + "event_timestamp": [ts + timedelta(hours=3), ts + timedelta(hours=4)], "driver_id": [1001, 1001], "conv_rate": [random.random(), random.random()], "acc_rate": [random.random(), random.random()], @@ -155,7 +156,7 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour second_df = pd.DataFrame.from_dict( { - "event_timestamp": [ts - timedelta(hours=1), ts], + "event_timestamp": [ts + timedelta(hours=5), ts + timedelta(hours=6)], "driver_id": [1001, 1001], "conv_rate": [random.random(), random.random()], "acc_rate": [random.random(), random.random()], @@ -172,10 +173,10 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour { "driver_id": [1001, 1001, 1001, 1001], "event_timestamp": [ - ts - timedelta(hours=4), - ts - timedelta(hours=3), - ts - timedelta(hours=1), - ts, + ts + timedelta(hours=3), + ts + timedelta(hours=4), + ts + timedelta(hours=5), + ts + timedelta(hours=6), ], } ) diff --git a/sdk/python/tests/integration/offline_store/test_push_offline_retrieval.py b/sdk/python/tests/integration/offline_store/test_push_offline.py similarity index 65% rename from sdk/python/tests/integration/offline_store/test_push_offline_retrieval.py rename to sdk/python/tests/integration/offline_store/test_push_offline.py index 23bb0f98a7..0b1db9011a 100644 --- a/sdk/python/tests/integration/offline_store/test_push_offline_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_push_offline.py @@ -8,25 +8,20 @@ from tests.integration.feature_repos.repo_configuration import ( 
construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import ( - customer, - driver, - location, -) +from tests.integration.feature_repos.universal.entities import location @pytest.mark.integration @pytest.mark.universal_offline_stores -@pytest.mark.universal_online_stores(only=["sqlite"]) -def test_push_features_and_read_from_offline_store(environment, universal_data_sources): +def test_push_features_and_read(environment, universal_data_sources): store = environment.feature_store - - (_, _, data_sources) = universal_data_sources + _, _, data_sources = universal_data_sources feature_views = construct_universal_feature_views(data_sources) - now = pd.Timestamp(datetime.datetime.utcnow()).round("ms") + location_fv = feature_views.pushed_locations + store.apply([location(), location_fv]) - store.apply([driver(), customer(), location(), *feature_views.values()]) - entity_df = pd.DataFrame.from_dict({"location_id": [100], "event_timestamp": [now]}) + now = pd.Timestamp(datetime.datetime.utcnow()).round("ms") + entity_df = pd.DataFrame.from_dict({"location_id": [1], "event_timestamp": [now]}) before_df = store.get_historical_features( entity_df=entity_df, @@ -34,13 +29,9 @@ def test_push_features_and_read_from_offline_store(environment, universal_data_s full_feature_names=False, ).to_df() - # TODO(felixwang9817): Note that we choose an entity value of 100 here since it is not included - # in the existing range of entity values (1-49). This allows us to push data for this test - # without affecting other tests. This decision is tech debt, and should be resolved by finding a - # better way to isolate data sources across tests. 
data = { "event_timestamp": [now], - "location_id": [100], + "location_id": [1], "temperature": [4], "created": [now], } diff --git a/sdk/python/tests/integration/online_store/test_push_online_retrieval.py b/sdk/python/tests/integration/online_store/test_push_online.py similarity index 55% rename from sdk/python/tests/integration/online_store/test_push_online_retrieval.py rename to sdk/python/tests/integration/online_store/test_push_online.py index 436f87715f..42561563f9 100644 --- a/sdk/python/tests/integration/online_store/test_push_online_retrieval.py +++ b/sdk/python/tests/integration/online_store/test_push_online.py @@ -6,29 +6,20 @@ from tests.integration.feature_repos.repo_configuration import ( construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import ( - customer, - driver, - location, -) +from tests.integration.feature_repos.universal.entities import location @pytest.mark.integration @pytest.mark.universal_online_stores def test_push_features_and_read(environment, universal_data_sources): store = environment.feature_store - - (_, datasets, data_sources) = universal_data_sources + _, _, data_sources = universal_data_sources feature_views = construct_universal_feature_views(data_sources) + location_fv = feature_views.pushed_locations + store.apply([location(), location_fv]) - store.apply([driver(), customer(), location(), *feature_views.values()]) - - # TODO(felixwang9817): Note that we choose an entity value of 101 here since it is not included - # in the existing range of entity values (1-49). This allows us to push data for this test - # without affecting other tests. This decision is tech debt, and should be resolved by finding a - # better way to isolate data sources across tests. 
data = { - "location_id": [101], + "location_id": [1], "temperature": [4], "event_timestamp": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")], "created": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")], @@ -39,8 +30,8 @@ def test_push_features_and_read(environment, universal_data_sources): online_resp = store.get_online_features( features=["pushable_location_stats:temperature"], - entity_rows=[{"location_id": 101}], + entity_rows=[{"location_id": 1}], ) online_resp_dict = online_resp.to_dict() - assert online_resp_dict["location_id"] == [101] + assert online_resp_dict["location_id"] == [1] assert online_resp_dict["temperature"] == [4] diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index b01448e7cc..228069c13c 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -441,82 +441,6 @@ def test_online_retrieval_with_event_timestamps( ) -@pytest.mark.integration -@pytest.mark.universal_online_stores -@pytest.mark.goserver -@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) -def test_stream_feature_view_online_retrieval( - environment, universal_data_sources, feature_server_endpoint, full_feature_names -): - """ - Tests materialization and online retrieval for stream feature views. - - This test is separate from test_online_retrieval since combining feature views and - stream feature views into a single test resulted in test flakiness. This is tech - debt that should be resolved soon. - """ - # Set up feature store. - fs = environment.feature_store - entities, datasets, data_sources = universal_data_sources - feature_views = construct_universal_feature_views(data_sources) - pushable_feature_view = feature_views.pushed_locations - fs.apply([location(), pushable_feature_view]) - - # Materialize. 
- fs.materialize( - environment.start_date - timedelta(days=1), - environment.end_date + timedelta(days=1), - ) - - # Get online features by randomly sampling 10 entities that exist in the batch source. - sample_locations = datasets.location_df.sample(10)["location_id"] - entity_rows = [ - {"location_id": sample_location} for sample_location in sample_locations - ] - - feature_refs = [ - "pushable_location_stats:temperature", - ] - unprefixed_feature_refs = [f.rsplit(":", 1)[-1] for f in feature_refs if ":" in f] - - online_features_dict = get_online_features_dict( - environment=environment, - endpoint=feature_server_endpoint, - features=feature_refs, - entity_rows=entity_rows, - full_feature_names=full_feature_names, - ) - - # Check that the response has the expected set of keys. - keys = set(online_features_dict.keys()) - expected_keys = set( - f.replace(":", "__") if full_feature_names else f.split(":")[-1] - for f in feature_refs - ) | {"location_id"} - assert ( - keys == expected_keys - ), f"Response keys are different from expected: {keys - expected_keys} (extra) and {expected_keys - keys} (missing)" - - # Check that the feature values match. 
- tc = unittest.TestCase() - for i, entity_row in enumerate(entity_rows): - df_features = get_latest_feature_values_from_location_df( - entity_row, datasets.location_df - ) - - assert df_features["location_id"] == online_features_dict["location_id"][i] - for unprefixed_feature_ref in unprefixed_feature_refs: - tc.assertAlmostEqual( - df_features[unprefixed_feature_ref], - online_features_dict[ - response_feature_name( - unprefixed_feature_ref, feature_refs, full_feature_names - ) - ][i], - delta=0.0001, - ) - - @pytest.mark.integration @pytest.mark.universal_online_stores @pytest.mark.goserver @@ -534,6 +458,7 @@ def test_online_retrieval( feature_views.driver[["conv_rate"]], feature_views.driver_odfv, feature_views.customer[["current_balance"]], + feature_views.pushed_locations, ], ) feature_service_entity_mapping = FeatureService( @@ -566,7 +491,7 @@ def test_online_retrieval( ) entity_sample = datasets.orders_df.sample(10)[ - ["customer_id", "driver_id", "order_id", "event_timestamp"] + ["customer_id", "driver_id", "order_id", "origin_id", "event_timestamp"] ] orders_df = datasets.orders_df[ ( @@ -585,6 +510,8 @@ def test_online_retrieval( datasets.customer_df["customer_id"].isin(sample_customers) ] + sample_origins = entity_sample["origin_id"] + location_pairs = np.array(list(itertools.permutations(entities.location_vals, 2))) sample_location_pairs = location_pairs[ np.random.choice(len(location_pairs), 10) @@ -597,10 +524,11 @@ def test_online_retrieval( ] global_df = datasets.global_df + location_df = datasets.location_df entity_rows = [ - {"driver_id": d, "customer_id": c, "val_to_add": 50} - for (d, c) in zip(sample_drivers, sample_customers) + {"driver_id": d, "customer_id": c, "location_id": o, "val_to_add": 50} + for (d, c, o) in zip(sample_drivers, sample_customers, sample_origins) ] feature_refs = [ @@ -614,6 +542,7 @@ def test_online_retrieval( "order:order_is_success", "global_stats:num_rides", "global_stats:avg_ride_length", + 
"pushable_location_stats:temperature", ] unprefixed_feature_refs = [f.rsplit(":", 1)[-1] for f in feature_refs if ":" in f] # Remove the on demand feature view output features, since they're not present in the source dataframe @@ -644,7 +573,7 @@ def test_online_retrieval( expected_keys = set( f.replace(":", "__") if full_feature_names else f.split(":")[-1] for f in feature_refs - ) | {"customer_id", "driver_id"} + ) | {"customer_id", "driver_id", "location_id"} assert ( keys == expected_keys ), f"Response keys are different from expected: {keys - expected_keys} (extra) and {expected_keys - keys} (missing)" @@ -657,6 +586,7 @@ def test_online_retrieval( orders_df=orders_df, global_df=global_df, entity_row=entity_row, + location_df=location_df, ) assert df_features["customer_id"] == online_features_dict["customer_id"][i] @@ -695,7 +625,9 @@ def test_online_retrieval( environment=environment, endpoint=feature_server_endpoint, features=feature_refs, - entity_rows=[{"driver_id": 0, "customer_id": 0, "val_to_add": 100}], + entity_rows=[ + {"driver_id": 0, "customer_id": 0, "location_id": 0, "val_to_add": 100} + ], full_feature_names=full_feature_names, ) assert missing_responses_dict is not None @@ -715,7 +647,7 @@ def test_online_retrieval( environment=environment, endpoint=feature_server_endpoint, features=feature_refs, - entity_rows=[{"driver_id": 0, "customer_id": 0}], + entity_rows=[{"driver_id": 0, "customer_id": 0, "location_id": 0}], full_feature_names=full_feature_names, ) @@ -729,6 +661,7 @@ def test_online_retrieval( customers_df, orders_df, global_df, + location_df, ) entity_rows = [ @@ -857,6 +790,7 @@ def get_latest_feature_values_from_dataframes( orders_df, entity_row, global_df=None, + location_df=None, origin_df=None, destination_df=None, ): @@ -864,6 +798,9 @@ def get_latest_feature_values_from_dataframes( latest_customer_row = get_latest_row( entity_row, customer_df, "customer_id", "customer_id" ) + latest_location_row = get_latest_row( + entity_row, 
location_df, "location_id", "location_id", + ) # Since the event timestamp columns may contain timestamps of different timezones, # we must first convert the timestamps to UTC before we can compare them. @@ -883,7 +820,7 @@ def get_latest_feature_values_from_dataframes( global_df["event_timestamp"].idxmax() ].to_dict() if origin_df is not None: - latest_location_row = get_latest_feature_values_for_location_df( + latest_location_aliased_row = get_latest_feature_values_for_location_df( entity_row, origin_df, destination_df ) @@ -896,6 +833,7 @@ def get_latest_feature_values_from_dataframes( **latest_driver_row, **latest_orders_row, **latest_global_row, + **latest_location_row, **request_data_features, } if origin_df is not None: @@ -906,12 +844,14 @@ def get_latest_feature_values_from_dataframes( **latest_driver_row, **latest_orders_row, **latest_location_row, + **latest_location_aliased_row, **request_data_features, } return { **latest_customer_row, **latest_driver_row, **latest_orders_row, + **latest_location_row, **request_data_features, } @@ -949,6 +889,7 @@ def assert_feature_service_correctness( customers_df, orders_df, global_df, + location_df, ): feature_service_online_features_dict = get_online_features_dict( environment=environment, @@ -968,6 +909,7 @@ def assert_feature_service_correctness( assert set(feature_service_keys) == set(expected_feature_refs) | { "customer_id", "driver_id", + "location_id", } tc = unittest.TestCase() @@ -978,6 +920,7 @@ def assert_feature_service_correctness( orders_df=orders_df, global_df=global_df, entity_row=entity_row, + location_df=location_df, ) tc.assertAlmostEqual( feature_service_online_features_dict[ diff --git a/sdk/python/tests/integration/registration/test_universal_types.py b/sdk/python/tests/integration/registration/test_universal_types.py index d5cf270b25..938773b7b7 100644 --- a/sdk/python/tests/integration/registration/test_universal_types.py +++ 
b/sdk/python/tests/integration/registration/test_universal_types.py @@ -19,7 +19,7 @@ String, UnixTimestamp, ) -from tests.data.data_creator import create_dataset +from tests.data.data_creator import create_basic_driver_dataset from tests.integration.feature_repos.universal.entities import driver from tests.integration.feature_repos.universal.feature_views import driver_feature_view @@ -91,7 +91,7 @@ def get_fixtures(request, environment): ".", "" ).lower() config = request.param - df = create_dataset( + df = create_basic_driver_dataset( Int64, config.feature_dtype, config.feature_is_list, config.has_empty_list, ) data_source = environment.data_source_creator.create_data_source( @@ -115,7 +115,7 @@ def test_entity_inference_types_match(environment, entity_type): fs = environment.feature_store # Don't specify value type in entity to force inference - df = create_dataset(entity_type, feature_dtype="int32",) + df = create_basic_driver_dataset(entity_type, feature_dtype="int32",) data_source = environment.data_source_creator.create_data_source( df, destination_name=f"entity_type_{entity_type.name.lower()}", diff --git a/sdk/python/tests/unit/test_registry_conflict.py b/sdk/python/tests/unit/test_registry_conflict.py deleted file mode 100644 index 12f666556a..0000000000 --- a/sdk/python/tests/unit/test_registry_conflict.py +++ /dev/null @@ -1,23 +0,0 @@ -import pytest - -from feast.entity import Entity - - -@pytest.mark.integration -def test_apply_first_entity(environment): - entity = Entity(name="first") - fs = environment.feature_store - fs.apply([entity]) - - entities = fs.list_entities() - assert len(entities) == 1 - - -@pytest.mark.integration -def test_apply_second_entity(environment): - entity = Entity(name="second") - fs = environment.feature_store - fs.apply([entity]) - - entities = fs.list_entities() - assert len(entities) == 1 From 109ee9cff5bcda46889583f2968003f6a3e375b3 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Wed, 6 Jul 2022 12:48:13 -0700 Subject: 
[PATCH 19/73] feat: Add to_remote_storage method to RetrievalJob (#2916) * feat: Add to_remote_storage method to RetrievalJob Signed-off-by: Achal Shah * docstring Signed-off-by: Achal Shah --- .../infra/offline_stores/offline_store.py | 20 +++++++++++++++++++ .../test_universal_historical_retrieval.py | 4 ++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index 439911fe2a..e24317e859 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -163,6 +163,26 @@ def metadata(self) -> Optional[RetrievalMetadata]: """ pass + def supports_remote_storage_export(self) -> bool: + """ + This method should return True if the RetrievalJob supports `to_remote_storage()`. + """ + return False + + def to_remote_storage(self) -> List[str]: + """ + This method should export the result of this RetrievalJob to + remote storage (such as S3, GCS, HDFS, etc). + Implementations of this method should export the results as + multiple parquet files, each file sized appropriately + depending on how much data is being returned by the retrieval + job. + + Returns: + A list of parquet file paths in remote storage. 
+ """ + raise NotImplementedError() + class OfflineStore(ABC): """ diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index 2076ab2aed..0b2965084d 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -280,7 +280,7 @@ def get_expected_training_df( @pytest.mark.integration @pytest.mark.universal_offline_stores -@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: f"full:{v}") def test_historical_features(environment, universal_data_sources, full_feature_names): store = environment.feature_store @@ -410,7 +410,7 @@ def test_historical_features(environment, universal_data_sources, full_feature_n @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores @pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) def test_historical_features_with_shared_batch_source( environment, universal_data_sources, full_feature_names From 66038c7aabaf03f4828d31605f7b6ba57857844c Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Thu, 7 Jul 2022 15:22:40 -0700 Subject: [PATCH 20/73] chore: Implement to_remote_storage for supported offline stores (#2918) * chore: Implement to_remote_storage for RedshiftRetrievalJob Signed-off-by: Achal Shah * Implement to_remote_storage for BigQuery Signed-off-by: Achal Shah * add for snowflake as well Signed-off-by: Achal Shah * fully fleshed for snowflake Signed-off-by: Achal Shah * better test config : Signed-off-by: Achal Shah * fix tests: Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * more fixes Signed-off-by: Achal Shah * more fixes Signed-off-by: Achal Shah * more fixes Signed-off-by: Achal Shah * fix bigquery 
Signed-off-by: Achal Shah * use temp table for entity df table Signed-off-by: Achal Shah * simplify condition Signed-off-by: Achal Shah * remove temp Signed-off-by: Achal Shah --- .../feast/infra/offline_stores/bigquery.py | 49 ++++++++++++++++++ sdk/python/feast/infra/offline_stores/file.py | 3 ++ .../feast/infra/offline_stores/redshift.py | 7 +++ .../feast/infra/offline_stores/snowflake.py | 51 ++++++++++++++++++- sdk/python/feast/infra/utils/aws_utils.py | 15 +++++- .../universal/data_sources/bigquery.py | 4 +- .../universal/data_sources/snowflake.py | 2 + .../test_universal_historical_retrieval.py | 5 ++ 8 files changed, 131 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index cb5b3a045a..3bf340acf9 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -57,6 +57,7 @@ from google.cloud import bigquery from google.cloud.bigquery import Client, SchemaField, Table from google.cloud.bigquery._pandas_helpers import ARROW_SCALAR_IDS_TO_BQ + from google.cloud.storage import Client as StorageClient except ImportError as e: from feast.errors import FeastExtrasDependencyImportError @@ -83,6 +84,9 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel): For more information on BigQuery data locations see: https://cloud.google.com/bigquery/docs/locations """ + gcs_staging_location: Optional[str] = None + """ (optional) GCS location used for offloading BigQuery results as parquet files.""" + class BigQueryOfflineStore(OfflineStore): @staticmethod @@ -386,6 +390,14 @@ def query_generator() -> Iterator[str]: on_demand_feature_views if on_demand_feature_views else [] ) self._metadata = metadata + if self.config.offline_store.gcs_staging_location: + self._gcs_path = ( + self.config.offline_store.gcs_staging_location + + f"/{self.config.project}/export/" + + str(uuid.uuid4()) + ) + else: + self._gcs_path = None 
@property def full_feature_names(self) -> bool: @@ -478,6 +490,43 @@ def persist(self, storage: SavedDatasetStorage): def metadata(self) -> Optional[RetrievalMetadata]: return self._metadata + def supports_remote_storage_export(self) -> bool: + return self._gcs_path is not None + + def to_remote_storage(self) -> List[str]: + if not self._gcs_path: + raise ValueError( + "gcs_staging_location needs to be specified for the big query " + "offline store when executing `to_remote_storage()`" + ) + + table = self.to_bigquery() + + job_config = bigquery.job.ExtractJobConfig() + job_config.destination_format = "PARQUET" + + extract_job = self.client.extract_table( + table, + destination_uris=[f"{self._gcs_path}/*.parquet"], + location=self.config.offline_store.location, + job_config=job_config, + ) + extract_job.result() + + bucket: str + prefix: str + storage_client = StorageClient(project=self.client.project) + bucket, prefix = self._gcs_path[len("gs://") :].split("/", 1) + prefix = prefix.rsplit("/", 1)[0] + if prefix.startswith("/"): + prefix = prefix[1:] + + blobs = storage_client.list_blobs(bucket, prefix=prefix) + results = [] + for b in blobs: + results.append(f"gs://{b.bucket.name}/{b.name}") + return results + def block_until_done( client: Client, diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index d60d468174..ae98f8d0c2 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -105,6 +105,9 @@ def persist(self, storage: SavedDatasetStorage): def metadata(self) -> Optional[RetrievalMetadata]: return self._metadata + def supports_remote_storage_export(self) -> bool: + return False + class FileOfflineStore(OfflineStore): @staticmethod diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 5f071a814f..1d7b79727e 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ 
b/sdk/python/feast/infra/offline_stores/redshift.py @@ -490,6 +490,13 @@ def persist(self, storage: SavedDatasetStorage): def metadata(self) -> Optional[RetrievalMetadata]: return self._metadata + def supports_remote_storage_export(self) -> bool: + return True + + def to_remote_storage(self) -> List[str]: + path = self.to_s3() + return aws_utils.list_s3_files(self._config.offline_store.region, path) + def _upload_entity_df( entity_df: Union[pd.DataFrame, str], diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index a5befc33e2..71394c4403 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -1,5 +1,6 @@ import contextlib import os +import uuid from datetime import datetime from pathlib import Path from typing import ( @@ -90,6 +91,12 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel): schema_: Optional[str] = Field(None, alias="schema") """ Snowflake schema name """ + storage_integration_name: Optional[str] = None + """ Storage integration name in snowflake """ + + blob_export_location: Optional[str] = None + """ Location (in S3, Google storage or Azure storage) where data is offloaded """ + class Config: allow_population_by_field_name = True @@ -378,6 +385,11 @@ def query_generator() -> Iterator[str]: on_demand_feature_views if on_demand_feature_views else [] ) self._metadata = metadata + self.export_path: Optional[str] + if self.config.offline_store.blob_export_location: + self.export_path = f"{self.config.offline_store.blob_export_location}/{self.config.project}/{uuid.uuid4()}" + else: + self.export_path = None @property def full_feature_names(self) -> bool: @@ -413,7 +425,7 @@ def _to_arrow_internal(self) -> pa.Table: pd.DataFrame(columns=[md.name for md in empty_result.description]) ) - def to_snowflake(self, table_name: str) -> None: + def to_snowflake(self, table_name: str, temporary=False) -> None: """Save dataset 
as a new Snowflake table""" if self.on_demand_feature_views is not None: transformed_df = self.to_df() @@ -425,7 +437,7 @@ def to_snowflake(self, table_name: str) -> None: return None with self._query_generator() as query: - query = f'CREATE TABLE IF NOT EXISTS "{table_name}" AS ({query});\n' + query = f'CREATE {"TEMPORARY" if temporary else ""} TABLE IF NOT EXISTS "{table_name}" AS ({query});\n' execute_snowflake_statement(self.snowflake_conn, query) @@ -453,6 +465,41 @@ def persist(self, storage: SavedDatasetStorage): def metadata(self) -> Optional[RetrievalMetadata]: return self._metadata + def supports_remote_storage_export(self) -> bool: + return ( + self.config.offline_store.storage_integration_name + and self.config.offline_store.blob_export_location + ) + + def to_remote_storage(self) -> List[str]: + if not self.export_path: + raise ValueError( + "to_remote_storage() requires `blob_export_location` to be specified in config" + ) + if not self.config.offline_store.storage_integration_name: + raise ValueError( + "to_remote_storage() requires `storage_integration_name` to be specified in config" + ) + + table = f"temporary_{uuid.uuid4().hex}" + self.to_snowflake(table) + + copy_into_query = f"""copy into '{self.config.offline_store.blob_export_location}/{table}' from "{self.config.offline_store.database}"."{self.config.offline_store.schema_}"."{table}"\n + storage_integration = {self.config.offline_store.storage_integration_name}\n + file_format = (TYPE = PARQUET)\n + DETAILED_OUTPUT = TRUE\n + HEADER = TRUE;\n + """ + + cursor = execute_snowflake_statement(self.snowflake_conn, copy_into_query) + all_rows = ( + cursor.fetchall() + ) # This may be need pagination at some point in the future. 
+ file_name_column_index = [ + idx for idx, rm in enumerate(cursor.description) if rm.name == "FILE_NAME" + ][0] + return [f"{self.export_path}/{row[file_name_column_index]}" for row in all_rows] + def _get_entity_schema( entity_df: Union[pd.DataFrame, str], diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index 7badda9846..51aecbf8a7 100644 --- a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -3,7 +3,7 @@ import tempfile import uuid from pathlib import Path -from typing import Any, Dict, Iterator, Optional, Tuple, Union +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union import pandas as pd import pyarrow @@ -473,7 +473,7 @@ def execute_redshift_query_and_unload_to_s3( # Run the query, unload the results to S3 unique_table_name = "_" + str(uuid.uuid4()).replace("-", "") query = f"CREATE TEMPORARY TABLE {unique_table_name} AS ({query});\n" - query += f"UNLOAD ('SELECT * FROM {unique_table_name}') TO '{s3_path}/' IAM_ROLE '{iam_role}' PARQUET" + query += f"UNLOAD ('SELECT * FROM {unique_table_name}') TO '{s3_path}/' IAM_ROLE '{iam_role}' FORMAT AS PARQUET" execute_redshift_statement(redshift_data_client, cluster_id, database, user, query) @@ -632,3 +632,14 @@ def delete_api_gateway(api_gateway_client, api_gateway_id: str) -> Dict: def get_account_id() -> str: """Get AWS Account ID""" return boto3.client("sts").get_caller_identity().get("Account") + + +def list_s3_files(aws_region: str, path: str) -> List[str]: + s3 = boto3.client("s3", config=Config(region_name=aws_region)) + if path.startswith("s3://"): + path = path[len("s3://") :] + bucket, prefix = path.split("/", 1) + objects = s3.list_objects_v2(Bucket=bucket, Prefix=prefix) + contents = objects["Contents"] + files = [f"s3://{bucket}/{content['Key']}" for content in contents] + return files diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py 
b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 620f444159..83bc1ef308 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -51,7 +51,9 @@ def teardown(self): self.dataset = None def create_offline_store_config(self): - return BigQueryOfflineStoreConfig() + return BigQueryOfflineStoreConfig( + location="US", gcs_staging_location="gs://feast-export/" + ) def create_data_source( self, diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index 23466bc00c..ae83ea8eb0 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -34,6 +34,8 @@ def __init__(self, project_name: str, *args, **kwargs): warehouse=os.environ["SNOWFLAKE_CI_WAREHOUSE"], database="FEAST", schema="OFFLINE", + storage_integration_name="FEAST_S3", + blob_export_location="s3://feast-snowflake-offload/export", ) def create_data_source( diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index 0b2965084d..abaf1622c0 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -370,6 +370,11 @@ def test_historical_features(environment, universal_data_sources, full_feature_n full_feature_names=full_feature_names, ) + if job_from_df.supports_remote_storage_export(): + files = job_from_df.to_remote_storage() + print(files) + assert len(files) > 0 # This test should be way more detailed + start_time = datetime.utcnow() actual_df_from_df_entities = 
job_from_df.to_df() From 495f5f0e42e8695aeb5b7b92a9cc3640c015cce7 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Fri, 8 Jul 2022 14:35:36 -0400 Subject: [PATCH 21/73] chore: Update Feast UI dependency to reflect recent changes (#2924) chore: Update Feast UI dependency Signed-off-by: Danny Chiao --- sdk/python/feast/ui/package.json | 2 +- sdk/python/feast/ui/yarn.lock | 8 ++++---- ui/package.json | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/python/feast/ui/package.json b/sdk/python/feast/ui/package.json index 556637aaae..ec04655723 100644 --- a/sdk/python/feast/ui/package.json +++ b/sdk/python/feast/ui/package.json @@ -6,7 +6,7 @@ "@elastic/datemath": "^5.0.3", "@elastic/eui": "^57.0.0", "@emotion/react": "^11.9.0", - "@feast-dev/feast-ui": "^0.20.4", + "@feast-dev/feast-ui": "^0.20.5", "@testing-library/jest-dom": "^5.16.4", "@testing-library/react": "^13.2.0", "@testing-library/user-event": "^13.5.0", diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index f6301957c8..83157394ca 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -1345,10 +1345,10 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" -"@feast-dev/feast-ui@^0.20.4": - version "0.20.4" - resolved "https://registry.yarnpkg.com/@feast-dev/feast-ui/-/feast-ui-0.20.4.tgz#4b918f8922f3eecd9e3e7323f25ba9cac78a4567" - integrity sha512-KTUhKni7t++G6UwXyPbGWXwWHnTOVTH8ouYCoHXbGorgRL3K4fbq5tCSCJzP9L5FAo+cF1AjVZNRgwzPe6vAgA== +"@feast-dev/feast-ui@^0.20.5": + version "0.20.5" + resolved "https://registry.yarnpkg.com/@feast-dev/feast-ui/-/feast-ui-0.20.5.tgz#bb0d6fc81cbd92ca69b779982ab151a8d9cabaee" + integrity sha512-BwMPJSv1MkylHxPnU/2fZX77AC/G4H2DIf+HAj80ZklwB0zbmeZzhXFrVh4xSheevGZFh0L839JeL14WfXPZsA== dependencies: "@elastic/datemath" "^5.0.3" "@elastic/eui" "^55.0.1" diff --git a/ui/package.json b/ui/package.json index 252faf8613..22128cc968 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": 
"@feast-dev/feast-ui", - "version": "0.20.4", + "version": "0.20.5", "private": false, "files": [ "dist" From 9ae22a17cc358b59dfa8a117a057fbd15a685076 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Jul 2022 12:31:39 -0700 Subject: [PATCH 22/73] chore: Bump moment from 2.29.2 to 2.29.4 in /ui (#2926) chore(deps): bump moment from 2.29.2 to 2.29.4 in /ui Bumps [moment](https://github.com/moment/moment) from 2.29.2 to 2.29.4. - [Release notes](https://github.com/moment/moment/releases) - [Changelog](https://github.com/moment/moment/blob/develop/CHANGELOG.md) - [Commits](https://github.com/moment/moment/compare/2.29.2...2.29.4) --- updated-dependencies: - dependency-name: moment dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ui/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui/yarn.lock b/ui/yarn.lock index 998565a77a..78c069b38a 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -7326,9 +7326,9 @@ mkdirp@^0.5.5, mkdirp@~0.5.1: minimist "^1.2.5" moment@^2.29.1: - version "2.29.2" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.2.tgz#00910c60b20843bcba52d37d58c628b47b1f20e4" - integrity sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg== + version "2.29.4" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" + integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== ms@2.0.0: version "2.0.0" From 130746ea5cfadad6ef467c0cb0490d4745fdad70 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Fri, 8 Jul 2022 15:21:38 -0700 Subject: [PATCH 23/73] fix: Fix the go build and use CgoArrowAllocator to prevent incorrect garbage collection (#2919) * Temp fix Signed-off-by: Kevin Zhang * Temp fix Signed-off-by: Kevin 
Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * add dynamic linking flags Signed-off-by: Kevin Zhang * Update gitignore Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix workflows Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/build_wheels.yml | 12 ++++++++++ .github/workflows/linter.yml | 18 ++++++++++++-- .github/workflows/master_only.yml | 12 ++++++++++ .github/workflows/pr_integration_tests.yml | 12 ++++++++++ .github/workflows/unit_tests.yml | 24 +++++++++++++++---- Makefile | 19 ++++++++------- .../feature-servers/go-feature-retrieval.md | 16 +++++++++++-- go.mod | 8 +++---- go.sum | 9 ++++--- go/embedded/online_features.go | 2 +- .../onlinestore/sqliteonlinestore_test.go | 4 ++-- setup.py | 5 +++- 12 files changed, 114 insertions(+), 27 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index ea93cc85a4..c69ba4687d 100644 --- a/.github/workflows/build_wheels.yml +++ 
b/.github/workflows/build_wheels.yml @@ -62,6 +62,18 @@ jobs: registry-url: 'https://registry.npmjs.org' - name: Build UI run: make build-ui + - name: Install apache-arrow on ubuntu + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install apache-arrow on macos + if: matrix.os == 'macOS-latest' + run: brew install apache-arrow - name: Build wheels uses: pypa/cibuildwheel@v2.7.0 env: diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index a0a6d7dd38..ba475e2585 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -39,6 +39,14 @@ jobs: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- - name: Install pip-tools run: pip install pip-tools + - name: Install apache-arrow on ubuntu + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev - name: Install dependencies run: | make compile-protos-go @@ -63,7 +71,13 @@ jobs: - name: Upgrade pip version run: | pip install --upgrade "pip>=21.3.1,<22.1" - - name: Install dependencies - run: make install-go-proto-dependencies + - name: Install apache-arrow on ubuntu + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 
'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev - name: Lint go run: make lint-go \ No newline at end of file diff --git a/.github/workflows/master_only.yml b/.github/workflows/master_only.yml index 0cb49bb525..c9ebcdaf04 100644 --- a/.github/workflows/master_only.yml +++ b/.github/workflows/master_only.yml @@ -127,6 +127,18 @@ jobs: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- - name: Install pip-tools run: pip install pip-tools + - name: Install apache-arrow on ubuntu + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install apache-arrow on macos + if: matrix.os == 'macOS-latest' + run: brew install apache-arrow - name: Install dependencies run: make install-python-ci-dependencies - name: Setup Redis Cluster diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index e1c7ed2de2..db9e48fc2d 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -153,6 +153,18 @@ jobs: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- - name: Install pip-tools run: pip install pip-tools + - name: Install apache-arrow on ubuntu + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 
'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install apache-arrow on macos + if: matrix.os == 'macOS-latest' + run: brew install apache-arrow - name: Install dependencies run: make install-python-ci-dependencies - name: Setup Redis Cluster diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index a9bf3deba3..b3fadb2121 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -54,6 +54,18 @@ jobs: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- - name: Install pip-tools run: pip install pip-tools + - name: Install apache-arrow on ubuntu + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install apache-arrow on macos + if: matrix.os == 'macOS-latest' + run: brew install apache-arrow - name: Install dependencies run: make install-python-ci-dependencies - name: Test Python @@ -92,9 +104,13 @@ jobs: uses: actions/setup-go@v2 with: go-version: 1.18.0 - - name: Install dependencies - run: make install-go-proto-dependencies - - name: Compile protos - run: make compile-protos-go + - name: Install apache-arrow on ubuntu + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V 
./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev - name: Test run: make test-go diff --git a/Makefile b/Makefile index 176e2cb354..4b541963af 100644 --- a/Makefile +++ b/Makefile @@ -172,12 +172,12 @@ install-go-proto-dependencies: install-go-ci-dependencies: # TODO: currently gopy installation doesn't work w/o explicit go get in the next line # TODO: there should be a better way to install gopy - go get github.com/go-python/gopy@v0.4.0 + go get github.com/go-python/gopy@v0.4.4 go install golang.org/x/tools/cmd/goimports # The `go get` command on the previous lines download the lib along with replacing the dep to `feast-dev/gopy` # but the following command is needed to install it for some reason. go install github.com/go-python/gopy - python -m pip install pybindgen==0.22.0 + python -m pip install pybindgen==0.22.0 protobuf==3.20.1 install-protoc-dependencies: pip install grpcio-tools==1.47.0 mypy-protobuf==3.1.0 @@ -186,18 +186,21 @@ compile-protos-go: install-go-proto-dependencies install-protoc-dependencies python setup.py build_go_protos compile-go-lib: install-go-proto-dependencies install-go-ci-dependencies - COMPILE_GO=True python setup.py build_ext --inplace + CGO_LDFLAGS_ALLOW=".*" COMPILE_GO=True python setup.py build_ext --inplace -# Needs feast package to setup the feature store -test-go: compile-protos-go +install-feast-ci-locally: pip install -e ".[ci]" - go test ./... + +# Needs feast package to setup the feature store +# CGO flag is due to this issue: https://github.com/golang/go/wiki/InvalidFlag +test-go: compile-protos-go compile-go-lib install-feast-ci-locally + CGO_LDFLAGS_ALLOW=".*" go test -tags cgo,ccalloc ./... 
format-go: gofmt -s -w go/ -lint-go: compile-protos-go - go vet ./go/internal/feast ./go/embedded +lint-go: compile-protos-go compile-go-lib + go vet -tags cgo,ccalloc ./go/internal/feast ./go/embedded # Docker diff --git a/docs/reference/feature-servers/go-feature-retrieval.md b/docs/reference/feature-servers/go-feature-retrieval.md index 685e7201cb..92a9ca2ebe 100644 --- a/docs/reference/feature-servers/go-feature-retrieval.md +++ b/docs/reference/feature-servers/go-feature-retrieval.md @@ -16,8 +16,20 @@ However, some additional dependencies are required for Go <-> Python interoperab ``` pip install feast[go] ``` +You will also have to install the apache-arrow c++ libraries, since we use the cgo memory allocator to prevent memory from being incorrectly garbage collected, detailed in these [docs](https://pkg.go.dev/github.com/apache/arrow/go/arrow@v0.0.0-20211112161151-bc219186db40/cdata#ExportArrowRecordBatch). -For developers, if you want to build from source, run `make compile-go-lib` to build and compile the go server. +For macos, run `brew install apache-arrow`. +For linux users, you have to install `libarrow-dev`. +``` +sudo apt update +sudo apt install -y -V ca-certificates lsb-release wget +wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb +sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb +sudo apt update +sudo apt install -y -V libarrow-dev # For C++ +``` + +For developers, if you want to build from source, run `make compile-go-lib` to build and compile the go server. In order to build the go binaries, you will need to install the `apache-arrow` c++ libraries. ## Usage @@ -63,7 +75,7 @@ feature_server: emit_timeout_micro_secs: 10000 queue_capacity: 10000 ``` -All these parameters are optional. +All these parameters are optional. 
## Future/Current Work diff --git a/go.mod b/go.mod index 90ddb93e21..ef64636e73 100644 --- a/go.mod +++ b/go.mod @@ -2,12 +2,10 @@ module github.com/feast-dev/feast go 1.17 -replace github.com/go-python/gopy v0.4.0 => github.com/feast-dev/gopy v0.4.1-0.20220429180328-4257ac71a4d0 - require ( github.com/apache/arrow/go/v8 v8.0.0 github.com/ghodss/yaml v1.0.0 - github.com/go-python/gopy v0.4.0 + github.com/go-python/gopy v0.4.4 github.com/go-redis/redis/v8 v8.11.4 github.com/golang/protobuf v1.5.2 github.com/google/uuid v1.3.0 @@ -28,6 +26,8 @@ require ( github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/goccy/go-json v0.9.6 // indirect github.com/golang/snappy v0.0.4 // indirect + github.com/gonuts/commander v0.1.0 // indirect + github.com/gonuts/flag v0.1.0 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect github.com/klauspost/asmfmt v1.3.2 // indirect github.com/klauspost/compress v1.15.1 // indirect @@ -38,7 +38,7 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 // indirect - golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect + golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3 // indirect golang.org/x/sys v0.0.0-20220412211240-33da011f77ad // indirect golang.org/x/text v0.3.7 // indirect diff --git a/go.sum b/go.sum index 933ecf6b29..d0b7f8fcd8 100644 --- a/go.sum +++ b/go.sum @@ -87,8 +87,6 @@ github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go. 
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/feast-dev/gopy v0.4.1-0.20220429180328-4257ac71a4d0 h1:Go714ObVP1O+a6qK7haXVL28QNm6WMD8bwnN9EA8PlM= -github.com/feast-dev/gopy v0.4.1-0.20220429180328-4257ac71a4d0/go.mod h1:ZO6vpitQ61NVoQP/2yOubPS6ET5pP3CAWCiMYn5eqCc= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= @@ -112,6 +110,8 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-python/gopy v0.4.4 h1:3LTsrfVcmg2VEM6wU+eh4d9EZn5H2iogObXjiQHrF8Q= +github.com/go-python/gopy v0.4.4/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA= github.com/go-redis/redis/v8 v8.11.4 h1:kHoYkfZP6+pe04aFTnhDH6GDROa5yJdHJVNxV3F46Tg= github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= @@ -147,7 +147,9 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy 
v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/gonuts/commander v0.1.0 h1:EcDTiVw9oAVORFjQOEOuHQqcl6OXMyTgELocTq6zJ0I= github.com/gonuts/commander v0.1.0/go.mod h1:qkb5mSlcWodYgo7vs8ulLnXhfinhZsZcm6+H/z1JjgY= +github.com/gonuts/flag v0.1.0 h1:fqMv/MZ+oNGu0i9gp0/IQ/ZaPIDoAZBOBaJoV7viCWM= github.com/gonuts/flag v0.1.0/go.mod h1:ZTmTGtrSPejTo/SRNhCqwLTmiAgyBdCkLYhHrAoBdz4= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -439,8 +441,9 @@ golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= -golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o= golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= diff --git a/go/embedded/online_features.go b/go/embedded/online_features.go index f6b21169e1..e5860507e4 100644 --- a/go/embedded/online_features.go +++ b/go/embedded/online_features.go @@ -180,7 +180,7 @@ func (s 
*OnlineFeatureService) GetOnlineFeatures( outputFields := make([]arrow.Field, 0) outputColumns := make([]arrow.Array, 0) - pool := memory.NewGoAllocator() + pool := memory.NewCgoArrowAllocator() for _, featureVector := range resp { outputFields = append(outputFields, arrow.Field{ diff --git a/go/internal/feast/onlinestore/sqliteonlinestore_test.go b/go/internal/feast/onlinestore/sqliteonlinestore_test.go index 5af1c1f4ce..e5e6e85e56 100644 --- a/go/internal/feast/onlinestore/sqliteonlinestore_test.go +++ b/go/internal/feast/onlinestore/sqliteonlinestore_test.go @@ -17,9 +17,9 @@ import ( func TestSqliteAndFeatureRepoSetup(t *testing.T) { dir := t.TempDir() feature_repo_path := filepath.Join(dir, "feature_repo") + err := test.SetupCleanFeatureRepo(dir) assert.Nil(t, err) - config, err := registry.NewRepoConfigFromFile(feature_repo_path) assert.Nil(t, err) assert.Equal(t, "feature_repo", config.Project) @@ -37,9 +37,9 @@ func TestSqliteOnlineRead(t *testing.T) { dir := t.TempDir() feature_repo_path := filepath.Join(dir, "feature_repo") test.SetupCleanFeatureRepo(dir) - config, err := registry.NewRepoConfigFromFile(feature_repo_path) assert.Nil(t, err) + store, err := NewSqliteOnlineStore("feature_repo", config, config.OnlineStore) defer store.Destruct() assert.Nil(t, err) diff --git a/setup.py b/setup.py index 217b2e1011..deaf47f955 100644 --- a/setup.py +++ b/setup.py @@ -430,7 +430,7 @@ def build_extension(self, ext: Extension): destination = os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name))) subprocess.check_call(["go", "install", "golang.org/x/tools/cmd/goimports"], env={"PATH": bin_path, **go_env}) - subprocess.check_call(["go", "get", "github.com/go-python/gopy@v0.4.0"], + subprocess.check_call(["go", "get", "github.com/go-python/gopy@v0.4.4"], env={"PATH": bin_path, **go_env}) subprocess.check_call(["go", "install", "github.com/go-python/gopy"], env={"PATH": bin_path, **go_env}) @@ -442,6 +442,9 @@ def build_extension(self, ext: Extension): 
destination, "-vm", sys.executable, + "--build-tags", + 'cgo,ccalloc', + "--dynamic-link=True", "-no-make", *ext.sources, ], From 292adc2793594575f06387d4750b02a452621c5b Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Mon, 11 Jul 2022 13:28:08 -0400 Subject: [PATCH 24/73] ci: Fixing local integration tests, defaulting to test containers (#2927) --- .../workflows/pr_local_integration_tests.yml | 63 +++++++++++++++++++ CONTRIBUTING.md | 24 ++++--- Makefile | 25 ++++++-- sdk/python/tests/conftest.py | 5 +- .../feature_repos/repo_configuration.py | 2 + .../universal/online_store/datastore.py | 2 +- .../universal/online_store/dynamodb.py | 2 +- .../universal/online_store/hbase.py | 2 +- .../universal/online_store/redis.py | 2 +- .../integration/registration/test_registry.py | 11 ++++ 10 files changed, 121 insertions(+), 17 deletions(-) create mode 100644 .github/workflows/pr_local_integration_tests.yml diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml new file mode 100644 index 0000000000..0736ae29dc --- /dev/null +++ b/.github/workflows/pr_local_integration_tests.yml @@ -0,0 +1,63 @@ +name: pr-local-integration-tests +# This runs local tests with containerized stubs of online stores. This is the main dev workflow + +on: + pull_request_target: + types: + - opened + - synchronize + - labeled + +jobs: + integration-test-python-local: + # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. 
+ if: + (github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm'))) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: [ "3.8" ] + os: [ ubuntu-latest ] + env: + OS: ${{ matrix.os }} + PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v2 + with: + # pull_request_target runs the workflow in the context of the base repo + # as such actions/checkout needs to be explicit configured to retrieve + # code from the PR. + ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + - name: Setup Python + uses: actions/setup-python@v2 + id: setup-python + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1,<22.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools + - name: Install dependencies + run: make install-python-ci-dependencies + - name: Test local integration tests + if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak + run: make test-python-integration-local diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 
4bd14d762a..9c25a835bd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -133,17 +133,19 @@ make test-python ### Integration Tests There are two sets of tests you can run: -1. Local integration tests (for faster development) +1. Local integration tests (for faster development, tests file offline store & key online stores) 2. Full integration tests (requires cloud environment setups) #### Local integration tests -To get local integration tests running, you'll need to have Redis setup: +For this approach of running tests, you'll need to have docker set up locally: [Get Docker](https://docs.docker.com/get-docker/) -Redis -1. Install Redis: [Quickstart](https://redis.io/topics/quickstart) -2. Run `redis-server` +It leverages a file based offline store to test against emulated versions of Datastore, DynamoDB, and Redis, using ephemeral containers. -Now run `make test-python-universal-local` +These tests create new temporary tables / datasets locally only, and they are cleaned up. when the containers are torn down. + +```sh +make test-python-integration-local +``` #### Full integration tests To test across clouds, on top of setting up Redis, you also need GCP / AWS / Snowflake setup. @@ -166,7 +168,15 @@ To test across clouds, on top of setting up Redis, you also need GCP / AWS / Sno 2. Modify `RedshiftDataSourceCreator` to use your credentials **Snowflake** -- See https://signup.snowflake.com/ +1. See https://signup.snowflake.com/ to setup a trial. +2. Then to run successfully, you'll need some environment variables setup: +```sh +export SNOWFLAKE_CI_DEPLOYMENT='[snowflake_deployment]' +export SNOWFLAKE_CI_USER='[your user]' +export SNOWFLAKE_CI_PASSWORD='[your pw]' +export SNOWFLAKE_CI_ROLE='[your CI role e.g. SYSADMIN]' +export SNOWFLAKE_CI_WAREHOUSE='[your warehouse]' +``` Then run `make test-python-integration`. Note that for Snowflake / GCP / AWS, this will create new temporary tables / datasets. 
diff --git a/Makefile b/Makefile index 4b541963af..6d733ac61f 100644 --- a/Makefile +++ b/Makefile @@ -68,8 +68,26 @@ test-python: test-python-integration: FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests +test-python-integration-local: + @(docker info > /dev/null 2>&1 && \ + FEAST_USAGE=False \ + IS_TEST=True \ + FEAST_IS_LOCAL_TEST=True \ + FEAST_LOCAL_ONLINE_CONTAINER=True \ + python -m pytest -n 8 --integration \ + -k "not test_apply_entity_integration and \ + not test_apply_feature_view_integration and \ + not test_apply_data_source_integration" \ + sdk/python/tests \ + ) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!"; + test-python-integration-container: - FEAST_USAGE=False IS_TEST=True FEAST_LOCAL_ONLINE_CONTAINER=True python -m pytest -n 8 --integration sdk/python/tests + @(docker info > /dev/null 2>&1 && \ + FEAST_USAGE=False \ + IS_TEST=True \ + FEAST_LOCAL_ONLINE_CONTAINER=True \ + python -m pytest -n 8 --integration sdk/python/tests \ + ) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!"; test-python-universal-contrib: PYTHONPATH='.' 
\ @@ -104,14 +122,11 @@ test-python-universal-postgres: not test_universal_types" \ sdk/python/tests -test-python-universal-local: - FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest -n 8 --integration sdk/python/tests - test-python-universal: FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests test-python-go-server: compile-go-lib - FEAST_USAGE=False IS_TEST=True FEAST_GO_FEATURE_RETRIEVAL=True pytest --integration --goserver sdk/python/tests + FEAST_USAGE=False IS_TEST=True pytest --integration --goserver sdk/python/tests format-python: # Sort diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index d77d7e82fd..1700970f1e 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -110,7 +110,10 @@ def pytest_collection_modifyitems(config, items: List[Item]): items.append(t) goserver_tests = [t for t in items if "goserver" in t.keywords] - if should_run_goserver: + if not should_run_goserver: + for t in goserver_tests: + items.remove(t) + else: items.clear() for t in goserver_tests: items.append(t) diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index a168f4f028..4dc1db4a13 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -91,6 +91,7 @@ "sqlite": ({"type": "sqlite"}, None), } +# Only configure Cloud DWH if running full integration tests if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True": AVAILABLE_OFFLINE_STORES.extend( [ @@ -141,6 +142,7 @@ } +# Replace online stores with emulated online stores if we're running local integration tests if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true": replacements: Dict[ str, Tuple[Union[str, Dict[str, str]], Optional[Type[OnlineStoreCreator]]] diff --git 
a/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py b/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py index 6067a1ff4b..b5bbb94f7c 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py @@ -27,7 +27,7 @@ def create_online_store(self) -> Dict[str, str]: self.container.start() log_string_to_wait_for = r"\[datastore\] Dev App Server is now running" wait_for_logs( - container=self.container, predicate=log_string_to_wait_for, timeout=5 + container=self.container, predicate=log_string_to_wait_for, timeout=10 ) exposed_port = self.container.get_exposed_port("8081") os.environ[datastore.client.DATASTORE_EMULATOR_HOST] = f"0.0.0.0:{exposed_port}" diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py b/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py index 473b7acee9..1aefdffb24 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py @@ -21,7 +21,7 @@ def create_online_store(self) -> Dict[str, str]: "Initializing DynamoDB Local with the following configuration:" ) wait_for_logs( - container=self.container, predicate=log_string_to_wait_for, timeout=5 + container=self.container, predicate=log_string_to_wait_for, timeout=10 ) exposed_port = self.container.get_exposed_port("8000") return { diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py b/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py index ecaace8709..dba611b30b 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py @@ -19,7 +19,7 @@ def create_online_store(self) -> Dict[str, str]: 
"Initializing Hbase Local with the following configuration:" ) wait_for_logs( - container=self.container, predicate=log_string_to_wait_for, timeout=5 + container=self.container, predicate=log_string_to_wait_for, timeout=10 ) exposed_port = self.container.get_exposed_port("9090") return {"type": "hbase", "host": "127.0.0.1", "port": exposed_port} diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py index 4995187665..11d62d9d30 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py @@ -17,7 +17,7 @@ def create_online_store(self) -> Dict[str, str]: self.container.start() log_string_to_wait_for = "Ready to accept connections" wait_for_logs( - container=self.container, predicate=log_string_to_wait_for, timeout=5 + container=self.container, predicate=log_string_to_wait_for, timeout=10 ) exposed_port = self.container.get_exposed_port("6379") return {"type": "redis", "connection_string": f"localhost:{exposed_port},db=0"} diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index 36e19e222a..ac7696f6e7 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -571,7 +571,18 @@ def test_apply_feature_view_integration(test_registry): @pytest.mark.parametrize( "test_registry", [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], ) +def test_apply_data_source_integration(test_registry: Registry): + run_test_data_source_apply(test_registry) + + +@pytest.mark.parametrize( + "test_registry", [lazy_fixture("local_registry")], +) def test_apply_data_source(test_registry: Registry): + run_test_data_source_apply(test_registry) + + +def run_test_data_source_apply(test_registry: Registry): # Create 
Feature Views batch_source = FileSource( name="test_source", From bdeb4aeaf2a5cfa144a65cc84f7bfb26e3077e7a Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Mon, 11 Jul 2022 13:55:22 -0700 Subject: [PATCH 25/73] fix: Fix build wheels workflow to install apache-arrow correctly (#2932) * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/build_wheels.yml | 18 ++++++------------ .../workflows/pr_local_integration_tests.yml | 9 +++++++++ 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index c69ba4687d..1b0059f1a3 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -62,18 +62,6 @@ jobs: registry-url: 'https://registry.npmjs.org' - name: Build UI run: make build-ui - - name: Install apache-arrow on ubuntu - if: matrix.os == 'ubuntu-latest' - run: | - sudo apt update - sudo apt install -y -V ca-certificates lsb-release wget - wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb - sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb - sudo apt update - sudo apt install -y -V libarrow-dev - - name: Install apache-arrow on macos - if: matrix.os == 'macOS-latest' - run: brew install apache-arrow - name: Build wheels uses: pypa/cibuildwheel@v2.7.0 env: 
@@ -86,7 +74,12 @@ jobs: curl -o go.tar.gz https://dl.google.com/go/go1.18.2.linux-amd64.tar.gz tar -C /usr/local -xzf go.tar.gz go version + yum -y update && + yum install -y epel-release || yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-$(cut -d: -f5 /etc/system-release-cpe | cut -d. -f1).noarch.rpm && + yum install -y https://apache.jfrog.io/artifactory/arrow/centos/$(cut -d: -f5 /etc/system-release-cpe | cut -d. -f1)/apache-arrow-release-latest.rpm && + yum install -y --enablerepo=epel arrow-devel # For C++ CIBW_BEFORE_ALL_MACOS: | + brew install apache-arrow curl -o python.pkg https://www.python.org/ftp/python/3.9.12/python-3.9.12-macosx10.9.pkg sudo installer -pkg python.pkg -target / # There's a `git restore` in here because `make install-go-ci-dependencies` is actually messing up go.mod & go.sum. @@ -111,6 +104,7 @@ jobs: CIBW_BEFORE_BUILD: | git status git restore go.mod go.sum + brew install apache-arrow - uses: actions/upload-artifact@v2 with: name: wheels diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 0736ae29dc..0847083393 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -56,6 +56,15 @@ jobs: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- - name: Install pip-tools run: pip install pip-tools + - name: Install apache-arrow on ubuntu + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev - name: Install dependencies run: make install-python-ci-dependencies - name: Test local integration tests 
From 9fc81a217a38851a1a781a7bc2f972211bfe406b Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Mon, 11 Jul 2022 17:12:00 -0700 Subject: [PATCH 26/73] chore: Update docs with new release workflow (#2898) * Update docs Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- docs/project/release-process.md | 81 ++++++++++----------------------- 1 file changed, 24 insertions(+), 57 deletions(-) diff --git a/docs/project/release-process.md b/docs/project/release-process.md index af573c92c7..f7102d13c3 100644 --- a/docs/project/release-process.md +++ b/docs/project/release-process.md @@ -4,60 +4,27 @@ For Feast maintainers, these are the concrete steps for making a new release. -1. For new major or minor release, create and check out the release branch for the new stream, e.g. `v0.6-branch`. For a patch version, check out the stream's release branch. -2. Update the [CHANGELOG.md](../../CHANGELOG.md). See the [Creating a change log](release-process.md#creating-a-change-log) guide and commit - * Make to review each PR in the changelog to [flag any breaking changes and deprecation.](release-process.md#flag-breaking-changes-and-deprecations) -3. Update versions for the release/release candidate with a commit: - 1. In the root `pom.xml`, remove `-SNAPSHOT` from the `` property, update versions, and commit. - 2. Tag the commit with the release version, using a `v` and `sdk/go/v` prefixes - * for a release candidate, create tags `vX.Y.Z-rc.N`and `sdk/go/vX.Y.Z-rc.N` - * for a stable release `X.Y.Z` create tags `vX.Y.Z` and `sdk/go/vX.Y.Z` - 3. 
Check that versions are updated with `make lint-versions`. - 4. If changes required are flagged by the version lint, make the changes, amend the commit and move the tag to the new commit. -4. Push the commits and tags. Make sure the CI passes. - * If the CI does not pass, or if there are new patches for the release fix, repeat step 2 & 3 with release candidates until stable release is achieved. -5. Bump to the next patch version in the release branch, append `-SNAPSHOT` in `pom.xml` and push. -6. Create a PR against master to: - 1. Bump to the next major/minor version and append `-SNAPSHOT` . - 2. Add the change log by applying the change log commit created in step 2. - 3. Check that versions are updated with `env TARGET_MERGE_BRANCH=master make lint-versions` -7. Create a [GitHub release](https://github.com/feast-dev/feast/releases) which includes a summary of im~~p~~ortant changes as well as any artifacts associated with the release. Make sure to include the same change log as added in [CHANGELOG.md](../../CHANGELOG.md). Use `Feast vX.Y.Z` as the title. - -When a tag that matches a Semantic Version string is pushed, CI will automatically build and push the relevant artifacts to their repositories or package managers (docker images, Python wheels, etc). JVM artifacts are promoted from Sonatype OSSRH to Maven Central, but it sometimes takes some time for them to be available. The `sdk/go/v tag` is required to version the Go SDK go module so that users can go get a specific tagged release of the Go SDK. - -### Creating a change log - -We use an [open source change log generator](https://hub.docker.com/r/ferrarimarco/github-changelog-generator/) to generate change logs. The process still requires a little bit of manual effort. - -1. Create a GitHub token as [per these instructions](https://github.com/github-changelog-generator/github-changelog-generator#github-token). The token is used as an input argument (`-t`) to the change log generator. -2. 
The change log generator configuration below will look for unreleased changes on a specific branch. The branch will be `master` for a major/minor release, or a release branch (`v0.4-branch`) for a patch release. You will need to set the branch using the `--release-branch` argument. -3. You should also set the `--future-release` argument. This is the version you are releasing. The version can still be changed at a later date. -4. Update the arguments below and run the command to generate the change log to the console. - -``` -docker run -it --rm ferrarimarco/github-changelog-generator \ ---user feast-dev \ ---project feast \ ---release-branch \ ---future-release \ ---unreleased-only \ ---no-issues \ ---bug-labels kind/bug \ ---enhancement-labels kind/feature \ ---breaking-labels compat/breaking \ --t \ ---max-issues 1 \ --o -``` - -1. Review each change log item. - * Make sure that sentences are grammatically correct and well formatted (although we will try to enforce this at the PR review stage). - * Make sure that each item is categorised correctly. You will see the following categories: `Breaking changes`, `Implemented enhancements`, `Fixed bugs`, and `Merged pull requests`. Any unlabelled PRs will be found in `Merged pull requests`. It's important to make sure that any `breaking changes`, `enhancements`, or `bug fixes` are pulled up out of `merged pull requests` into the correct category. Housekeeping, tech debt clearing, infra changes, or refactoring do not count as `enhancements`. Only enhancements a user benefits from should be listed in that category. - * Make sure that the "Full Change log" link is actually comparing the correct tags (normally your released version against the previously version). - * Make sure that release notes and breaking changes are present. - -### Flag Breaking Changes & Deprecations - -It's important to flag breaking changes and deprecation to the API for each release so that we can maintain API compatibility. 
- -Developers should have flagged PRs with breaking changes with the `compat/breaking` label. However, it's important to double check each PR's release notes and contents for changes that will break API compatibility and manually label `compat/breaking` to PRs with undeclared breaking changes. The change log will have to be regenerated if any new labels have to be added. +### Pre-release Verification (Verification that wheels are built correctly) for minor release. +1. Merge upstream master changes into your **fork**. Make sure you are running the workflow off of your fork! +2. Create a tag manually for the release on your fork. For example, if you are doing a release for version 0.22.0, create a tag by doing the following. + - Checkout master branch and run `git tag v0.22.0`. + - Run `git push --tags` to push the tag to your forks master branch. +3. Access the `Actions` tab on your github UI on your fork and click the `build_wheels` action. This workflow will build the python sdk wheels for Python 3.8-3.10 on MacOS 10.15 and Linux and verify that these wheels are correct. The publish workflow uses this action to publish the python wheels for a new release to pypi. +4. Look for the header `This workflow has a workflow_dispatch event trigger` and click `Run Workflow` on the right. +5. Run the workflow off of the tag you just created(`v0.22.0` in this case) and verify that the workflow worked (i.e ensure that all jobs are green). + +### Pre-release Verification (Verification that wheels are built correctly) for patch release. +1. Check out the branch of your release (e.g `v0.22-branch` on your local **fork**) and push this to your fork (`git push -u origin `). +2. Cherry pick commits that are relevant to the patch release onto your forked branch. +3. Checkout the release branch and add a patch release tag (e.g `v0.22.1`) by running `git tag `. +4. Push tags to your origin branch with `git push origin `. +5. 
Kick off `build_wheels` workflow in the same way as is detailed in the last section on of the patch release tag. + +### Release for Python and Java SDK +1. Generate a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) or retrieve your saved personal access token. + - The personal access token should have all of the permissions under the `repo` checkbox. +2. Access the `Actions` tab on the main `feast-dev/feast` repo and find the `release` action. +3. Look for the header `This workflow has a workflow_dispatch event trigger` again and click `Run Workflow` on the right. +4. Try the dry run first with your personal access token. If this succeeds, uncheck `Dry Run` and run the release workflow. +5. All of the jobs should succeed besides the UI job which needs to be released separately. Ping a maintainer on Slack to run the UI release manually. +6. Try to install the feast release in your local environment and test out the `feast init` -> `feast apply` workflow to verify as a sanity check that the release worked correctly. 
From 4394696e488c3f8cd7b824c9bbe6ae8fdbb64d90 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Mon, 11 Jul 2022 17:35:43 -0700 Subject: [PATCH 27/73] ci: Add a nightly CI job for integration tests (#2652) * Remove 3.7 wheels Signed-off-by: Kevin Zhang * ci: Add a nightly CI job for integration tests Signed-off-by: Achal Shah * Update and rebase Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang Co-authored-by: Kevin Zhang --- .github/workflows/build_wheels.yml | 4 +- .github/workflows/nightly-ci.yml | 140 +++++++++++++++++++++ .github/workflows/pr_integration_tests.yml | 2 +- 3 files changed, 143 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/nightly-ci.yml diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 1b0059f1a3..da84fc5e55 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -66,7 +66,7 @@ jobs: uses: pypa/cibuildwheel@v2.7.0 env: CIBW_BUILD: "cp3*_x86_64" - CIBW_SKIP: "cp36-* *-musllinux_x86_64 cp310-macosx_x86_64" + CIBW_SKIP: "cp36-* cp37-* *-musllinux_x86_64 cp310-macosx_x86_64" CIBW_ARCHS: "native" CIBW_ENVIRONMENT: > COMPILE_GO=True PATH=$PATH:/usr/local/go/bin @@ -150,7 +150,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-10.15 ] - python-version: [ "3.7", "3.8", "3.9", "3.10"] + python-version: [ "3.8", "3.9", "3.10"] from-source: [ True, False ] env: # this script is for testing servers diff --git a/.github/workflows/nightly-ci.yml b/.github/workflows/nightly-ci.yml new file mode 100644 index 0000000000..06571c2dd3 --- /dev/null +++ b/.github/workflows/nightly-ci.yml @@ -0,0 +1,140 @@ +name: nightly-ci + +on: + schedule: + - cron: '00 08 * * *' # early morning 08:00 AM UTC, which is 1 AM PST/4 AM EST. 
+ +# concurrency is currently broken, see details https://github.com/actions/runner/issues/1532 +#concurrency: +# group: pr-integration-tests-${{ github.event.pull_request.number }} +# cancel-in-progress: true + +jobs: + check_date: + runs-on: ubuntu-latest + name: Check latest commit + outputs: + WAS_EDITED: ${{ steps.check_date.outputs.WAS_EDITED }} + steps: + - uses: actions/checkout@v2 + with: + ref: develop + - id: check_date + name: Check if there were commits in the last day + if: ${{ github.event_name == 'schedule' }} + run: echo '::set-output name=WAS_EDITED::'$(test -n "$(git log --format=%H --since='24 hours ago')" && echo 'true' || echo 'false') + + integration-test-python: + needs: [check_date] + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: [ "3.8" ] + os: [ ubuntu-latest ] + env: + OS: ${{ matrix.os }} + PYTHON: ${{ matrix.python-version }} + services: + redis: + image: redis + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: + - uses: actions/checkout@v2 + with: + # pull_request_target runs the workflow in the context of the base repo + # as such actions/checkout needs to be explicit configured to retrieve + # code from the PR. 
+ ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + - name: Setup Python + uses: actions/setup-python@v2 + id: setup-python + with: + python-version: ${{ os.PYTHON }} + architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.18.0 + - name: Set up gcloud SDK + uses: google-github-actions/setup-gcloud@v0 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: Use gcloud CLI + run: gcloud info + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Use AWS CLI + run: aws sts get-caller-identity + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1,<22.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools + - name: Install apache-arrow on ubuntu + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - name: Install 
apache-arrow on macos + if: matrix.os == 'macOS-latest' + run: brew install apache-arrow + - name: Install dependencies + run: make install-python-ci-dependencies + - name: Setup Redis Cluster + run: | + docker pull vishnunair/docker-redis-cluster:latest + docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster + - name: Test python + if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak + env: + FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }} + FEAST_USAGE: "False" + IS_TEST: "True" + SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} + SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} + SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} + SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} + SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} + run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ./coverage.xml + flags: integrationtests + env_vars: OS,PYTHON + fail_ci_if_error: true + verbose: true \ No newline at end of file diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index db9e48fc2d..8f64950a30 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -191,4 +191,4 @@ jobs: flags: integrationtests env_vars: OS,PYTHON fail_ci_if_error: true - verbose: true + verbose: true \ No newline at end of file From 16ae902909911bbf45d0e430895b3bc20bba01e9 Mon Sep 17 00:00:00 2001 From: Hai Nguyen Date: Tue, 12 Jul 2022 11:59:06 +0700 Subject: [PATCH 28/73] fix: Resolve small typo in README file (#2930) Fix small type in README file Signed-off-by: Hai Nguyen 
--- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 0f77fbd42c..6bd7c95b7d 100644 --- a/README.md +++ b/README.md @@ -194,7 +194,7 @@ The list below contains the functionality that contributors are planning to deve * [ ] Java Client * [ ] Go Client * [ ] Delete API - * [] Feature Logging (for training) + * [ ] Feature Logging (for training) * **Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** * [x] Data profiling and validation (Great Expectations) * [ ] Training-serving skew detection (in progress) @@ -230,4 +230,4 @@ Thanks goes to these incredible people: - \ No newline at end of file + From 040c9107b719a7b3f3c70ab743f148e47b0a0982 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Tue, 12 Jul 2022 10:02:24 -0700 Subject: [PATCH 29/73] fix: Deprecate 3.7 wheels and fix verification workflow (#2934) * Remove 3.7 wheels Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/build_wheels.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index da84fc5e55..4a6bc34d09 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -187,6 +187,18 @@ jobs: cd dist/ pip install wheel for f in *.whl; do pip install $f || true; done + - name: Install apache-arrow on ubuntu + if: ${{ matrix.from-source && matrix.os == 'ubuntu-latest' }} + run: | + sudo apt update + sudo apt install -y -V ca-certificates lsb-release wget + wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb + sudo apt update + sudo apt install -y -V libarrow-dev + - 
name: Install apache-arrow on macos + if: ${{ matrix.from-source && matrix.os == 'macos-10.15' && matrix.python-version != '3.10' }} + run: brew install apache-arrow - name: Install dist with go if: ${{ matrix.from-source && (matrix.python-version != '3.10' || matrix.os == 'ubuntu-latest')}} env: From b917540c27052c01f872a2de686a6dd3b7a16e9c Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Tue, 12 Jul 2022 10:02:38 -0700 Subject: [PATCH 30/73] fix: Fix night ci syntax error and update readme (#2935) * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/nightly-ci.yml | 4 ++-- README.md | 2 +- docs/roadmap.md | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/nightly-ci.yml b/.github/workflows/nightly-ci.yml index 06571c2dd3..f125ebdc23 100644 --- a/.github/workflows/nightly-ci.yml +++ b/.github/workflows/nightly-ci.yml @@ -25,7 +25,7 @@ jobs: run: echo '::set-output name=WAS_EDITED::'$(test -n "$(git log --format=%H --since='24 hours ago')" && echo 'true' || echo 'false') integration-test-python: - needs: [check_date] + needs: check_date runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -45,7 +45,7 @@ jobs: --health-interval 10s --health-timeout 5s --health-retries 5 - steps: + steps: - uses: actions/checkout@v2 with: # pull_request_target runs the workflow in the context of the base repo diff --git a/README.md b/README.md index 6bd7c95b7d..64f6c5940f 100644 --- a/README.md +++ b/README.md @@ -230,4 +230,4 @@ Thanks goes to these incredible people: - + \ No newline at end of file diff --git a/docs/roadmap.md b/docs/roadmap.md index 19af4f95c9..8461256a15 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -59,7 +59,7 @@ The list below contains the functionality that contributors are planning to deve * [ ] Java Client * [ ] Go Client * [ ] Delete API - * [] Feature Logging (for training) + * [ ] Feature Logging (for training) * **Data Quality Management 
(See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** * [x] Data profiling and validation (Great Expectations) * [ ] Training-serving skew detection (in progress) From 054446cca05b7341dcb5ebe5ee3bc9d679f632e5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Jul 2022 10:36:07 -0700 Subject: [PATCH 31/73] chore(deps): Bump moment from 2.29.3 to 2.29.4 in /sdk/python/feast/ui (#2925) chore(deps): bump moment from 2.29.3 to 2.29.4 in /sdk/python/feast/ui Bumps [moment](https://github.com/moment/moment) from 2.29.3 to 2.29.4. - [Release notes](https://github.com/moment/moment/releases) - [Changelog](https://github.com/moment/moment/blob/develop/CHANGELOG.md) - [Commits](https://github.com/moment/moment/compare/2.29.3...2.29.4) --- updated-dependencies: - dependency-name: moment dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/feast/ui/package.json | 2 +- sdk/python/feast/ui/yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/ui/package.json b/sdk/python/feast/ui/package.json index ec04655723..883c19660b 100644 --- a/sdk/python/feast/ui/package.json +++ b/sdk/python/feast/ui/package.json @@ -13,7 +13,7 @@ "@types/d3": "^7.1.0", "d3": "^7.4.4", "inter-ui": "^3.19.3", - "moment": "^2.29.3", + "moment": "^2.29.4", "prop-types": "^15.8.1", "query-string": "^7.1.1", "react": "^18.1.0", diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index 83157394ca..f2fd12b4e5 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -7143,10 +7143,10 @@ mkdirp@~0.5.1: dependencies: minimist "^1.2.6" -moment@^2.29.1, moment@^2.29.3: - version "2.29.3" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" - 
integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw== +moment@^2.29.1, moment@^2.29.4: + version "2.29.4" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" + integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== ms@2.0.0: version "2.0.0" From 6f79069c561eba888d070c46aae920f7ad0c2319 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Thu, 14 Jul 2022 11:35:24 -0700 Subject: [PATCH 32/73] feat: Add an experimental lambda-based materialization engine (#2923) * feat: Add an experimental lambda-based materialization engine Signed-off-by: Achal Shah * setup and teardown lambda func Signed-off-by: Achal Shah * actually get the test working correctly Signed-off-by: Achal Shah * actually get the test working correctly Signed-off-by: Achal Shah * parallelize with threads Signed-off-by: Achal Shah * super call Signed-off-by: Achal Shah * fix bugs Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * undo unintended changes Signed-off-by: Achal Shah --- sdk/python/feast/infra/aws.py | 12 +- .../infra/materialization/lambda/Dockerfile | 25 ++ .../infra/materialization/lambda/__init__.py | 11 + .../feast/infra/materialization/lambda/app.py | 82 ++++++ .../materialization/lambda/lambda_engine.py | 238 ++++++++++++++++++ .../feast/infra/online_stores/dynamodb.py | 3 +- .../feast/infra/passthrough_provider.py | 18 +- sdk/python/feast/repo_config.py | 41 ++- sdk/python/tests/conftest.py | 16 ++ .../integration_test_repo_config.py | 9 + .../feature_repos/repo_configuration.py | 17 +- .../materialization/test_lambda.py | 200 +++++++++++++++ 12 files changed, 655 insertions(+), 17 deletions(-) create mode 100644 sdk/python/feast/infra/materialization/lambda/Dockerfile create mode 100644 sdk/python/feast/infra/materialization/lambda/__init__.py create mode 100644 
sdk/python/feast/infra/materialization/lambda/app.py create mode 100644 sdk/python/feast/infra/materialization/lambda/lambda_engine.py create mode 100644 sdk/python/tests/integration/materialization/test_lambda.py diff --git a/sdk/python/feast/infra/aws.py b/sdk/python/feast/infra/aws.py index 14301faf19..4109856e60 100644 --- a/sdk/python/feast/infra/aws.py +++ b/sdk/python/feast/infra/aws.py @@ -106,6 +106,15 @@ def update_infra( self._deploy_feature_server(project, image_uri) + if self.batch_engine: + self.batch_engine.update( + project, + tables_to_delete, + tables_to_keep, + entities_to_delete, + entities_to_keep, + ) + def _deploy_feature_server(self, project: str, image_uri: str): _logger.info("Deploying feature server...") @@ -198,8 +207,7 @@ def _deploy_feature_server(self, project: str, image_uri: str): def teardown_infra( self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], ) -> None: - if self.online_store: - self.online_store.teardown(self.repo_config, tables, entities) + super(AwsProvider, self).teardown_infra(project, tables, entities) if ( self.repo_config.feature_server is not None diff --git a/sdk/python/feast/infra/materialization/lambda/Dockerfile b/sdk/python/feast/infra/materialization/lambda/Dockerfile new file mode 100644 index 0000000000..bbdb74bdfe --- /dev/null +++ b/sdk/python/feast/infra/materialization/lambda/Dockerfile @@ -0,0 +1,25 @@ +FROM public.ecr.aws/lambda/python:3.9 + +RUN yum install -y git + + +# Copy app handler code +COPY sdk/python/feast/infra/materialization/lambda/app.py ${LAMBDA_TASK_ROOT} + +# Copy necessary parts of the Feast codebase +COPY sdk/python sdk/python +COPY protos protos +COPY go go +COPY setup.py setup.py +COPY pyproject.toml pyproject.toml +COPY README.md README.md + +# Install Feast for AWS with Lambda dependencies +# We need this mount thingy because setuptools_scm needs access to the +# git dir to infer the version of feast we're installing. 
+# https://github.com/pypa/setuptools_scm#usage-from-docker +# I think it also assumes that this dockerfile is being built from the root of the directory. +RUN --mount=source=.git,target=.git,type=bind pip3 install --no-cache-dir -e '.[aws,redis]' + +# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile) +CMD [ "app.handler" ] diff --git a/sdk/python/feast/infra/materialization/lambda/__init__.py b/sdk/python/feast/infra/materialization/lambda/__init__.py new file mode 100644 index 0000000000..d21505d91e --- /dev/null +++ b/sdk/python/feast/infra/materialization/lambda/__init__.py @@ -0,0 +1,11 @@ +from .lambda_engine import ( + LambdaMaterializationEngine, + LambdaMaterializationEngineConfig, + LambdaMaterializationJob, +) + +__all__ = [ + "LambdaMaterializationEngineConfig", + "LambdaMaterializationJob", + "LambdaMaterializationEngine", +] diff --git a/sdk/python/feast/infra/materialization/lambda/app.py b/sdk/python/feast/infra/materialization/lambda/app.py new file mode 100644 index 0000000000..ebed4c96e0 --- /dev/null +++ b/sdk/python/feast/infra/materialization/lambda/app.py @@ -0,0 +1,82 @@ +import base64 +import json +import sys +import tempfile +import traceback +from pathlib import Path + +import pyarrow.parquet as pq + +from feast import FeatureStore +from feast.constants import FEATURE_STORE_YAML_ENV_NAME +from feast.infra.materialization.local_engine import DEFAULT_BATCH_SIZE +from feast.utils import _convert_arrow_to_proto, _run_pyarrow_field_mapping + + +def handler(event, context): + """Provide an event that contains the following keys: + + - operation: one of the operations in the operations dict below + - tableName: required for operations that interact with DynamoDB + - payload: a parameter to pass to the operation being performed + """ + print("Received event: " + json.dumps(event, indent=2), flush=True) + + try: + + config_base64 = event[FEATURE_STORE_YAML_ENV_NAME] + + config_bytes = 
base64.b64decode(config_base64) + + # Create a new unique directory for writing feature_store.yaml + repo_path = Path(tempfile.mkdtemp()) + + with open(repo_path / "feature_store.yaml", "wb") as f: + f.write(config_bytes) + + # Initialize the feature store + store = FeatureStore(repo_path=str(repo_path.resolve())) + + view_name = event["view_name"] + view_type = event["view_type"] + path = event["path"] + + bucket = path[len("s3://") :].split("/", 1)[0] + key = path[len("s3://") :].split("/", 1)[1] + print(f"Inferred Bucket: `{bucket}` Key: `{key}`", flush=True) + + if view_type == "batch": + # TODO: This probably needs to be become `store.get_batch_feature_view` at some point. + feature_view = store.get_feature_view(view_name) + else: + feature_view = store.get_stream_feature_view(view_name) + + print(f"Got Feature View: `{feature_view}`", flush=True) + + table = pq.read_table(path) + if feature_view.batch_source.field_mapping is not None: + table = _run_pyarrow_field_mapping( + table, feature_view.batch_source.field_mapping + ) + + join_key_to_value_type = { + entity.name: entity.dtype.to_value_type() + for entity in feature_view.entity_columns + } + + written_rows = 0 + + for batch in table.to_batches(DEFAULT_BATCH_SIZE): + rows_to_write = _convert_arrow_to_proto( + batch, feature_view, join_key_to_value_type + ) + store._provider.online_write_batch( + store.config, feature_view, rows_to_write, lambda x: None, + ) + written_rows += len(rows_to_write) + return {"written_rows": written_rows} + except Exception as e: + print(f"Exception: {e}", flush=True) + print("Traceback:", flush=True) + print(traceback.format_exc(), flush=True) + sys.exit(1) diff --git a/sdk/python/feast/infra/materialization/lambda/lambda_engine.py b/sdk/python/feast/infra/materialization/lambda/lambda_engine.py new file mode 100644 index 0000000000..89a5f1a4f4 --- /dev/null +++ b/sdk/python/feast/infra/materialization/lambda/lambda_engine.py @@ -0,0 +1,238 @@ +import base64 +import json 
+import logging +from concurrent.futures import ThreadPoolExecutor, wait +from dataclasses import dataclass +from datetime import datetime +from typing import Callable, List, Literal, Optional, Sequence, Union + +import boto3 +from pydantic import StrictStr +from tqdm import tqdm + +from feast.batch_feature_view import BatchFeatureView +from feast.constants import FEATURE_STORE_YAML_ENV_NAME +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.infra.materialization.batch_materialization_engine import ( + BatchMaterializationEngine, + MaterializationJob, + MaterializationJobStatus, + MaterializationTask, +) +from feast.infra.offline_stores.offline_store import OfflineStore +from feast.infra.online_stores.online_store import OnlineStore +from feast.registry import BaseRegistry +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.stream_feature_view import StreamFeatureView +from feast.utils import _get_column_names +from feast.version import get_version + +DEFAULT_BATCH_SIZE = 10_000 + +logger = logging.getLogger(__name__) + + +class LambdaMaterializationEngineConfig(FeastConfigBaseModel): + """Batch Materialization Engine config for lambda based engine""" + + type: Literal["lambda"] = "lambda" + """ Type selector""" + + materialization_image: StrictStr + """ The URI of a container image in the Amazon ECR registry, which should be used for materialization. 
""" + + lambda_role: StrictStr + """ Role that should be used by the materialization lambda """ + + +@dataclass +class LambdaMaterializationJob(MaterializationJob): + def __init__(self, job_id: str, status: MaterializationJobStatus) -> None: + super().__init__() + self._job_id: str = job_id + self._status = status + self._error = None + + def status(self) -> MaterializationJobStatus: + return self._status + + def error(self) -> Optional[BaseException]: + return self._error + + def should_be_retried(self) -> bool: + return False + + def job_id(self) -> str: + return self._job_id + + def url(self) -> Optional[str]: + return None + + +class LambdaMaterializationEngine(BatchMaterializationEngine): + """ + WARNING: This engine should be considered "Alpha" functionality. + """ + + def update( + self, + project: str, + views_to_delete: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + views_to_keep: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + ): + # This should be setting up the lambda function. + r = self.lambda_client.create_function( + FunctionName=self.lambda_name, + PackageType="Image", + Role=self.repo_config.batch_engine.lambda_role, + Code={"ImageUri": self.repo_config.batch_engine.materialization_image}, + Timeout=600, + Tags={ + "feast-owned": "True", + "project": project, + "feast-sdk-version": get_version(), + }, + ) + logger.info("Creating lambda function %s, %s", self.lambda_name, r) + + logger.info("Waiting for function %s to be active", self.lambda_name) + waiter = self.lambda_client.get_waiter("function_active") + waiter.wait(FunctionName=self.lambda_name) + + def teardown_infra( + self, + project: str, + fvs: Sequence[Union[BatchFeatureView, StreamFeatureView, FeatureView]], + entities: Sequence[Entity], + ): + # This should be tearing down the lambda function. 
+ logger.info("Tearing down lambda %s", self.lambda_name) + r = self.lambda_client.delete_function(FunctionName=self.lambda_name) + logger.info("Finished tearing down lambda %s: %s", self.lambda_name, r) + + def __init__( + self, + *, + repo_config: RepoConfig, + offline_store: OfflineStore, + online_store: OnlineStore, + **kwargs, + ): + super().__init__( + repo_config=repo_config, + offline_store=offline_store, + online_store=online_store, + **kwargs, + ) + repo_path = self.repo_config.repo_path + assert repo_path + feature_store_path = repo_path / "feature_store.yaml" + self.feature_store_base64 = str( + base64.b64encode(bytes(feature_store_path.read_text(), "UTF-8")), "UTF-8" + ) + + self.lambda_name = f"feast-materialize-{self.repo_config.project}" + if len(self.lambda_name) > 64: + self.lambda_name = self.lambda_name[:64] + self.lambda_client = boto3.client("lambda") + + def materialize( + self, registry, tasks: List[MaterializationTask] + ) -> List[MaterializationJob]: + return [ + self._materialize_one( + registry, + task.feature_view, + task.start_time, + task.end_time, + task.project, + task.tqdm_builder, + ) + for task in tasks + ] + + def _materialize_one( + self, + registry: BaseRegistry, + feature_view: Union[BatchFeatureView, StreamFeatureView, FeatureView], + start_date: datetime, + end_date: datetime, + project: str, + tqdm_builder: Callable[[int], tqdm], + ): + entities = [] + for entity_name in feature_view.entities: + entities.append(registry.get_entity(entity_name, project)) + + ( + join_key_columns, + feature_name_columns, + timestamp_field, + created_timestamp_column, + ) = _get_column_names(feature_view, entities) + + job_id = f"{feature_view.name}-{start_date}-{end_date}" + + offline_job = self.offline_store.pull_latest_from_table_or_query( + config=self.repo_config, + data_source=feature_view.batch_source, + join_key_columns=join_key_columns, + feature_name_columns=feature_name_columns, + timestamp_field=timestamp_field, + 
created_timestamp_column=created_timestamp_column, + start_date=start_date, + end_date=end_date, + ) + + paths = offline_job.to_remote_storage() + max_workers = len(paths) if len(paths) <= 20 else 20 + executor = ThreadPoolExecutor(max_workers=max_workers) + futures = [] + + for path in paths: + payload = { + FEATURE_STORE_YAML_ENV_NAME: self.feature_store_base64, + "view_name": feature_view.name, + "view_type": "batch", + "path": path, + } + # Invoke a lambda to materialize this file. + + logger.info("Invoking materialization for %s", path) + futures.append( + executor.submit( + self.lambda_client.invoke, + FunctionName=self.lambda_name, + InvocationType="RequestResponse", + Payload=json.dumps(payload), + ) + ) + + done, not_done = wait(futures) + logger.info("Done: %s Not Done: %s", done, not_done) + for f in done: + response = f.result() + output = json.loads(response["Payload"].read()) + + logger.info( + f"Ingested task; request id {response['ResponseMetadata']['RequestId']}, " + f"rows written: {output['written_rows']}" + ) + + for f in not_done: + response = f.result() + logger.error(f"Ingestion failed: {response}") + + return LambdaMaterializationJob( + job_id=job_id, + status=MaterializationJobStatus.SUCCEEDED + if not not_done + else MaterializationJobStatus.ERROR, + ) diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index 50709fa3d4..6919f2cc29 100644 --- a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -229,7 +229,8 @@ def online_read( break batch_entity_ids = { table_instance.name: { - "Keys": [{"entity_id": entity_id} for entity_id in batch] + "Keys": [{"entity_id": entity_id} for entity_id in batch], + "ConsistentRead": True, } } with tracing_span(name="remote_call"): diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 181d46a5a8..e31eb1e177 100644 --- 
a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -22,7 +22,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.registry import BaseRegistry -from feast.repo_config import RepoConfig +from feast.repo_config import BATCH_ENGINE_CLASS_FOR_TYPE, RepoConfig from feast.saved_dataset import SavedDataset from feast.stream_feature_view import StreamFeatureView from feast.usage import RatioSampler, log_exceptions_and_usage, set_usage_attribute @@ -34,10 +34,6 @@ DEFAULT_BATCH_SIZE = 10_000 -BATCH_ENGINE_CLASS_FOR_TYPE = { - "local": "feast.infra.materialization.LocalMaterializationEngine", -} - class PassthroughProvider(Provider): """ @@ -73,7 +69,7 @@ def batch_engine(self) -> BatchMaterializationEngine: if self._batch_engine: return self._batch_engine else: - engine_config = self.repo_config.batch_engine_config + engine_config = self.repo_config._batch_engine_config config_is_dict = False if isinstance(engine_config, str): engine_config_type = engine_config @@ -129,6 +125,14 @@ def update_infra( entities_to_delete=entities_to_delete, partial=partial, ) + if self.batch_engine: + self.batch_engine.update( + project, + tables_to_delete, + tables_to_keep, + entities_to_delete, + entities_to_keep, + ) def teardown_infra( self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], @@ -136,6 +140,8 @@ def teardown_infra( set_usage_attribute("provider", self.__class__.__name__) if self.online_store: self.online_store.teardown(self.repo_config, tables, entities) + if self.batch_engine: + self.batch_engine.teardown_infra(project, tables, entities) def online_write_batch( self, diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index f315023ee1..f7f564df6f 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -30,6 +30,12 @@ # These dict 
exists so that: # - existing values for the online store type in featurestore.yaml files continue to work in a backwards compatible way # - first party and third party implementations can use the same class loading code path. +BATCH_ENGINE_CLASS_FOR_TYPE = { + "local": "feast.infra.materialization.LocalMaterializationEngine", + "lambda": "feast.infra.materialization.lambda.lambda_engine.LambdaMaterializationEngine", +} + + ONLINE_STORE_CLASS_FOR_TYPE = { "sqlite": "feast.infra.online_stores.sqlite.SqliteOnlineStore", "datastore": "feast.infra.online_stores.datastore.DatastoreOnlineStore", @@ -120,7 +126,7 @@ class RepoConfig(FeastBaseModel): _offline_config: Any = Field(alias="offline_store") """ OfflineStoreConfig: Offline store configuration (optional depending on provider) """ - batch_engine_config: Any = Field(alias="batch_engine") + _batch_engine_config: Any = Field(alias="batch_engine") """ BatchMaterializationEngine: Batch materialization configuration (optional depending on provider)""" feature_server: Optional[Any] @@ -160,10 +166,12 @@ def __init__(self, **data: Any): self._batch_engine = None if "batch_engine" in data: - self.batch_engine_config = data["batch_engine"] + self._batch_engine_config = data["batch_engine"] + elif "batch_engine_config" in data: + self._batch_engine_config = data["batch_engine_config"] else: # Defaults to using local in-process materialization engine. 
- self.batch_engine_config = "local" + self._batch_engine_config = "local" if isinstance(self.feature_server, Dict): self.feature_server = get_feature_server_config_from_type( @@ -205,6 +213,22 @@ def online_store(self): return self._online_store + @property + def batch_engine(self): + if not self._batch_engine: + if isinstance(self._batch_engine_config, Dict): + self._batch_engine = get_batch_engine_config_from_type( + self._batch_engine_config["type"] + )(**self._batch_engine_config) + elif isinstance(self._batch_engine_config, str): + self._batch_engine = get_batch_engine_config_from_type( + self._batch_engine_config + )() + elif self._batch_engine_config: + self._batch_engine = self._batch_engine + + return self._batch_engine + @root_validator(pre=True) @log_exceptions def _validate_online_store_config(cls, values): @@ -382,6 +406,17 @@ def get_data_source_class_from_type(data_source_type: str): return import_class(module_name, config_class_name, "DataSource") +def get_batch_engine_config_from_type(batch_engine_type: str): + if batch_engine_type in BATCH_ENGINE_CLASS_FOR_TYPE: + batch_engine_type = BATCH_ENGINE_CLASS_FOR_TYPE[batch_engine_type] + else: + assert batch_engine_type.endswith("Engine") + module_name, batch_engine_class_type = batch_engine_type.rsplit(".", 1) + config_class_name = f"{batch_engine_class_type}Config" + + return import_class(module_name, config_class_name, config_class_name) + + def get_online_config_from_type(online_store_type: str): if online_store_type in ONLINE_STORE_CLASS_FOR_TYPE: online_store_type = ONLINE_STORE_CLASS_FOR_TYPE[online_store_type] diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 1700970f1e..ac30149cfa 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -45,6 +45,22 @@ logger = logging.getLogger(__name__) +level = logging.INFO +logging.basicConfig( + format="%(asctime)s %(name)s %(levelname)s: %(message)s", + datefmt="%m/%d/%Y %I:%M:%S %p", + level=level, +) 
+# Override the logging level for already created loggers (due to loggers being created at the import time) +# Note, that format & datefmt does not need to be set, because by default child loggers don't override them + +# Also note, that mypy complains that logging.root doesn't have "manager" because of the way it's written. +# So we have to put a type ignore hint for mypy. +for logger_name in logging.root.manager.loggerDict: # type: ignore + if "feast" in logger_name: + logger = logging.getLogger(logger_name) + logger.setLevel(level) + def pytest_configure(config): if platform in ["darwin", "windows"]: diff --git a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py index 74ce37f17a..d2e0f70ba2 100644 --- a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py +++ b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py @@ -1,5 +1,6 @@ import hashlib from dataclasses import dataclass +from enum import Enum from typing import Dict, Optional, Type, Union from tests.integration.feature_repos.universal.data_source_creator import ( @@ -13,6 +14,11 @@ ) +class RegistryLocation(Enum): + Local = 1 + S3 = 2 + + @dataclass(frozen=False) class IntegrationTestRepoConfig: """ @@ -25,6 +31,9 @@ class IntegrationTestRepoConfig: offline_store_creator: Type[DataSourceCreator] = FileDataSourceCreator online_store_creator: Optional[Type[OnlineStoreCreator]] = None + batch_engine: Optional[Union[str, Dict]] = "local" + registry_location: RegistryLocation = RegistryLocation.Local + full_feature_names: bool = True infer_features: bool = False python_feature_server: bool = False diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 4dc1db4a13..4300ca64b6 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ 
b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -22,6 +22,7 @@ from feast.repo_config import RegistryConfig, RepoConfig from tests.integration.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, + RegistryLocation, ) from tests.integration.feature_repos.universal.data_source_creator import ( DataSourceCreator, @@ -381,8 +382,6 @@ def construct_test_environment( online_creator = None online_store = test_repo_config.online_store - repo_dir_name = tempfile.mkdtemp() - if test_repo_config.python_feature_server and test_repo_config.provider == "aws": from feast.infra.feature_servers.aws_lambda.config import ( AwsLambdaFeatureServerConfig, @@ -393,22 +392,30 @@ def construct_test_environment( execution_role_name="arn:aws:iam::402087665549:role/lambda_execution_role", ) - registry = ( - f"s3://feast-integration-tests/registries/{project}/registry.db" - ) # type: Union[str, RegistryConfig] else: feature_server = LocalFeatureServerConfig( feature_logging=FeatureLoggingConfig(enabled=True) ) + + repo_dir_name = tempfile.mkdtemp() + if ( + test_repo_config.python_feature_server and test_repo_config.provider == "aws" + ) or test_repo_config.registry_location == RegistryLocation.S3: + registry: Union[str, RegistryConfig] = ( + f"s3://feast-integration-tests/registries/{project}/registry.db" + ) + else: registry = RegistryConfig( path=str(Path(repo_dir_name) / "registry.db"), cache_ttl_seconds=1, ) + config = RepoConfig( registry=registry, project=project, provider=test_repo_config.provider, offline_store=offline_store_config, online_store=online_store, + batch_engine=test_repo_config.batch_engine, repo_path=repo_dir_name, feature_server=feature_server, go_feature_retrieval=test_repo_config.go_feature_retrieval, diff --git a/sdk/python/tests/integration/materialization/test_lambda.py b/sdk/python/tests/integration/materialization/test_lambda.py new file mode 100644 index 0000000000..66cd2c5eb9 --- /dev/null +++ 
b/sdk/python/tests/integration/materialization/test_lambda.py @@ -0,0 +1,200 @@ +import math +import time +from datetime import datetime, timedelta +from typing import Optional + +import pandas as pd +import pytest +from pytz import utc + +from feast import Entity, Feature, FeatureStore, FeatureView, ValueType +from tests.data.data_creator import create_basic_driver_dataset +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, + RegistryLocation, +) +from tests.integration.feature_repos.repo_configuration import ( + construct_test_environment, +) +from tests.integration.feature_repos.universal.data_sources.redshift import ( + RedshiftDataSourceCreator, +) + + +@pytest.mark.integration +def test_lambda_materialization(): + lambda_config = IntegrationTestRepoConfig( + provider="aws", + online_store={"type": "dynamodb", "region": "us-west-2"}, + offline_store_creator=RedshiftDataSourceCreator, + batch_engine={ + "type": "lambda", + "materialization_image": "402087665549.dkr.ecr.us-west-2.amazonaws.com/feast-lambda-consumer:v1", + "lambda_role": "arn:aws:iam::402087665549:role/lambda_execution_role", + }, + registry_location=RegistryLocation.S3, + ) + lambda_environment = construct_test_environment(lambda_config, None) + + df = create_basic_driver_dataset() + ds = lambda_environment.data_source_creator.create_data_source( + df, lambda_environment.feature_store.project, field_mapping={"ts_1": "ts"}, + ) + + fs = lambda_environment.feature_store + driver = Entity(name="driver_id", join_key="driver_id", value_type=ValueType.INT64,) + + driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=["driver_id"], + ttl=timedelta(weeks=52), + features=[Feature(name="value", dtype=ValueType.FLOAT)], + batch_source=ds, + ) + + try: + + fs.apply([driver, driver_stats_fv]) + + print(df) + + # materialization is run in two steps and + # we use timestamp from generated dataframe as a split point + split_dt = 
df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) + + print(f"Split datetime: {split_dt}") + + run_offline_online_store_consistency_test(fs, driver_stats_fv, split_dt) + finally: + fs.teardown() + + +def check_offline_and_online_features( + fs: FeatureStore, + fv: FeatureView, + driver_id: int, + event_timestamp: datetime, + expected_value: Optional[float], + full_feature_names: bool, + check_offline_store: bool = True, +) -> None: + # Check online store + response_dict = fs.get_online_features( + [f"{fv.name}:value"], + [{"driver_id": driver_id}], + full_feature_names=full_feature_names, + ).to_dict() + + if full_feature_names: + + if expected_value: + assert response_dict[f"{fv.name}__value"][0], f"Response: {response_dict}" + assert ( + abs(response_dict[f"{fv.name}__value"][0] - expected_value) < 1e-6 + ), f"Response: {response_dict}, Expected: {expected_value}" + else: + assert response_dict[f"{fv.name}__value"][0] is None + else: + if expected_value: + assert response_dict["value"][0], f"Response: {response_dict}" + assert ( + abs(response_dict["value"][0] - expected_value) < 1e-6 + ), f"Response: {response_dict}, Expected: {expected_value}" + else: + assert response_dict["value"][0] is None + + # Check offline store + if check_offline_store: + df = fs.get_historical_features( + entity_df=pd.DataFrame.from_dict( + {"driver_id": [driver_id], "event_timestamp": [event_timestamp]} + ), + features=[f"{fv.name}:value"], + full_feature_names=full_feature_names, + ).to_df() + + if full_feature_names: + if expected_value: + assert ( + abs( + df.to_dict(orient="list")[f"{fv.name}__value"][0] + - expected_value + ) + < 1e-6 + ) + else: + assert not df.to_dict(orient="list")[f"{fv.name}__value"] or math.isnan( + df.to_dict(orient="list")[f"{fv.name}__value"][0] + ) + else: + if expected_value: + assert ( + abs(df.to_dict(orient="list")["value"][0] - expected_value) < 1e-6 + ) + else: + assert not df.to_dict(orient="list")["value"] or math.isnan( + 
df.to_dict(orient="list")["value"][0] + ) + + +def run_offline_online_store_consistency_test( + fs: FeatureStore, fv: FeatureView, split_dt: datetime +) -> None: + now = datetime.utcnow() + + full_feature_names = True + check_offline_store: bool = True + + # Run materialize() + # use both tz-naive & tz-aware timestamps to test that they're both correctly handled + start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + end_date = split_dt + fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) + + time.sleep(10) + + # check result of materialize() + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=1, + event_timestamp=end_date, + expected_value=0.3, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=2, + event_timestamp=end_date, + expected_value=None, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + # check prior value for materialize_incremental() + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=3, + event_timestamp=end_date, + expected_value=4, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + # run materialize_incremental() + fs.materialize_incremental(feature_views=[fv.name], end_date=now) + + # check result of materialize_incremental() + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=3, + event_timestamp=now, + expected_value=5, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) From 1603c9e7765e08bb1832c03b66b754afbf8a9b4d Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Thu, 14 Jul 2022 11:36:22 -0700 Subject: [PATCH 33/73] fix: Fix nightly ci again (#2939) * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin 
Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/nightly-ci.yml | 67 ++++++++++++++++++++++++++++---- 1 file changed, 59 insertions(+), 8 deletions(-) diff --git a/.github/workflows/nightly-ci.yml b/.github/workflows/nightly-ci.yml index f125ebdc23..fead512408 100644 --- a/.github/workflows/nightly-ci.yml +++ b/.github/workflows/nightly-ci.yml @@ -18,14 +18,68 @@ jobs: steps: - uses: actions/checkout@v2 with: - ref: develop + ref: master - id: check_date name: Check if there were commits in the last day if: ${{ github.event_name == 'schedule' }} run: echo '::set-output name=WAS_EDITED::'$(test -n "$(git log --format=%H --since='24 hours ago')" && echo 'true' || echo 'false') - + build-docker-image: + needs: [check_date] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + ref: master + submodules: recursive + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + with: + install: true + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + - name: Set ECR image tag + id: image-tag + run: echo "::set-output name=DOCKER_IMAGE_TAG::`git rev-parse HEAD`" + - name: Cache Public ECR Image + id: lambda_python_3_9 + uses: actions/cache@v2 + with: + path: ~/cache + key: lambda_python_3_9 + - name: Handle Cache Miss (pull public ECR image & save it to tar file) + if: steps.cache-primes.outputs.cache-hit != 'true' + run: | + mkdir -p ~/cache + docker pull public.ecr.aws/lambda/python:3.9 + docker save public.ecr.aws/lambda/python:3.9 -o ~/cache/lambda_python_3_9.tar + - name: Handle Cache Hit (load docker image from tar file) 
+ if: steps.cache-primes.outputs.cache-hit == 'true' + run: | + docker load -i ~/cache/lambda_python_3_9.tar + - name: Build and push + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REPOSITORY: feast-python-server + run: | + docker build \ + --file sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile \ + --tag $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} \ + --load \ + . + docker push $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} + outputs: + DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} integration-test-python: - needs: check_date + needs: [check_date, build-docker-image] runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -48,16 +102,13 @@ jobs: steps: - uses: actions/checkout@v2 with: - # pull_request_target runs the workflow in the context of the base repo - # as such actions/checkout needs to be explicit configured to retrieve - # code from the PR. - ref: refs/pull/${{ github.event.pull_request.number }}/merge + ref: master submodules: recursive - name: Setup Python uses: actions/setup-python@v2 id: setup-python with: - python-version: ${{ os.PYTHON }} + python-version: ${{ matrix.python-version }} architecture: x64 - name: Setup Go id: setup-go From 268f28db37318f6be3872e4bde5de05f377eac69 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Thu, 14 Jul 2022 18:32:22 -0700 Subject: [PATCH 34/73] chore: Fixes and Readme for python<>go interface (#2936) * chore: Fixes and Readme for python<>go interface Signed-off-by: Achal Shah * cr Signed-off-by: Achal Shah * Switch to cgo allocator Signed-off-by: Felix Wang * Fix memory leak Signed-off-by: Felix Wang * Fix another memory leak Signed-off-by: Felix Wang * Do not use cgo allocator for memory buffers Signed-off-by: Felix Wang * Switch to cgo allocator for memory buffers Signed-off-by: Felix Wang * Switch test to pass Signed-off-by: Felix Wang * Use more idiomatic way to test truth Signed-off-by: Felix 
Wang * Update docs Signed-off-by: Felix Wang Co-authored-by: Felix Wang --- go/README.md | 109 ++++++++++++++++++ go/embedded/online_features.go | 37 +++++- go/internal/feast/featurestore.go | 2 +- go/internal/feast/onlineserving/serving.go | 10 ++ .../feast/server/logging/memorybuffer.go | 2 +- .../feast/server/logging/memorybuffer_test.go | 4 +- .../embedded_go/online_features_service.py | 1 + 7 files changed, 158 insertions(+), 7 deletions(-) create mode 100644 go/README.md diff --git a/go/README.md b/go/README.md new file mode 100644 index 0000000000..0bca470919 --- /dev/null +++ b/go/README.md @@ -0,0 +1,109 @@ +This directory contains the Go logic that's executed by the `EmbeddedOnlineFeatureServer` from Python. + +## Building and Linking +[gopy](https://github.com/go-python/gopy) generates (and compiles) a CPython extension module from a Go package. That's what we're using here, as visible in [setup.py](../setup.py). + +Under the hood, gopy invokes `go build`, and then templates `cgo` stubs for the Go module that exposes the public functions from the Go module as C functions. +For our project, this stuff can be found at `sdk/python/feast/embedded_go/lib/embedded.go` & `sdk/python/feast/embedded_go/lib/embedded_go.h` after running `make compile-go-lib`. + +## Arrow memory management +Understanding this is the trickiest part of this integration. + +At a high level, when using the Python<>Go integration, the Python layer exports request data into an [Arrow Record batch](https://arrow.apache.org/docs/python/data.html) which is transferred to Go using Arrow's zero copy mechanism. +Similarly, the Go layer converts feature values read from the online store into a Record Batch that's exported to Python using the same mechanics. + +The first thing to note is that from the Python perspective, all the export logic assumes that we're exporting to & importing from C, not Go. 
This is because pyarrow only interops with C, and the fact we're using Go is an implementation detail not relevant to the Python layer. + +### Export Entities & Request data from Python to Go +The code exporting to C is this, in [online_feature_service.py](../sdk/python/feast/embedded_go/online_features_service.py) +``` +( + entities_c_schema, + entities_ptr_schema, + entities_c_array, + entities_ptr_array, +) = allocate_schema_and_array() +( + req_data_c_schema, + req_data_ptr_schema, + req_data_c_array, + req_data_ptr_array, +) = allocate_schema_and_array() + +batch, schema = map_to_record_batch(entities, join_keys_types) +schema._export_to_c(entities_ptr_schema) +batch._export_to_c(entities_ptr_array) + +batch, schema = map_to_record_batch(request_data) +schema._export_to_c(req_data_ptr_schema) +batch._export_to_c(req_data_ptr_array) +``` + +Under the hood, `allocate_schema_and_array` allocates a pointer (`struct ArrowSchema*` and `struct ArrowArray*`) in native memory (i.e. the C layer) using `cffi`. +Next, the RecordBatch exports to this pointer using [`_export_to_c`](https://github.com/apache/arrow/blob/master/python/pyarrow/table.pxi#L2509), which uses [`ExportRecordBatch`](https://arrow.apache.org/docs/cpp/api/c_abi.html#_CPPv417ExportRecordBatchRK11RecordBatchP10ArrowArrayP11ArrowSchema) under the hood. + +As per the documentation for ExportRecordBatch: +> Status ExportRecordBatch(const RecordBatch &batch, struct ArrowArray *out, struct ArrowSchema *out_schema = NULLPTR) +> Export C++ RecordBatch using the C data interface format. +> +> The record batch is exported as if it were a struct array. The resulting ArrowArray struct keeps the record batch data and buffers alive until its release callback is called by the consumer. + +This is why `GetOnlineFeatures()` in `online_features.go` calls `record.Release()` as below: +``` +entitiesRecord, err := readArrowRecord(entities) +if err != nil { + return err +} +defer entitiesRecord.Release() +... 
+requestDataRecords, err := readArrowRecord(requestData) +if err != nil { + return err +} +defer requestDataRecords.Release() +``` + +Additionally, we need to pass in a pair of pointers to `GetOnlineFeatures()` that are populated by the Go layer, and the resultant feature values can be passed back to Python (via the C layer) using zero-copy semantics. +That happens as follows: +``` +( + features_c_schema, + features_ptr_schema, + features_c_array, + features_ptr_array, +) = allocate_schema_and_array() + +... + +record_batch = pa.RecordBatch._import_from_c( + features_ptr_array, features_ptr_schema +) +``` + +The corresponding Go code that exports this data is: +``` +result := array.NewRecord(arrow.NewSchema(outputFields, nil), outputColumns, int64(numRows)) + +cdata.ExportArrowRecordBatch(result, + cdata.ArrayFromPtr(output.DataPtr), + cdata.SchemaFromPtr(output.SchemaPtr)) +``` + +The documentation for `ExportArrowRecordBatch` is great. It has this super useful caveat: + +> // The release function on the populated CArrowArray will properly decrease the reference counts, +> // and release the memory if the record has already been released. But since this must be explicitly +> // done, make sure it is released so that you do not create a memory leak. + +This implies that the reciever is on the hook for explicitly releasing this memory. + +However, we're using `_import_from_c`, which uses [`ImportRecordBatch`](https://arrow.apache.org/docs/cpp/api/c_abi.html#_CPPv417ImportRecordBatchP10ArrowArrayP11ArrowSchema), which implies that the receiver of the RecordBatch is the new owner of the data. +This is wrapped by pyarrow - and when the corresponding python object goes out of scope, it should clean up the underlying record batch. + +Another thing to note (which I'm not sure may be the source of issues) is that Arrow has the concept of [Memory Pools](https://arrow.apache.org/docs/python/api/memory.html#memory-pools). +Memory pools can be set in python as well as in Go. 
I *believe* that if we use the CGoArrowAllocator, that uses whatever pool C++ uses, which should be the same as the one used by PyArrow. But this should be vetted. + + +### References +- https://arrow.apache.org/docs/format/CDataInterface.html#memory-management +- https://arrow.apache.org/docs/python/memory.html \ No newline at end of file diff --git a/go/embedded/online_features.go b/go/embedded/online_features.go index e5860507e4..7fd34d16e4 100644 --- a/go/embedded/online_features.go +++ b/go/embedded/online_features.go @@ -33,6 +33,11 @@ type OnlineFeatureService struct { grpcStopCh chan os.Signal httpStopCh chan os.Signal + statusColumnBuildersToRelease []*array.Int32Builder + tsColumnBuildersToRelease []*array.Int64Builder + arraysToRelease []arrow.Array + resultsToRelease []arrow.Record + err error } @@ -143,6 +148,7 @@ func (s *OnlineFeatureService) GetOnlineFeatures( if err != nil { return err } + defer entitiesRecord.Release() numRows := entitiesRecord.Column(0).Len() @@ -155,6 +161,7 @@ func (s *OnlineFeatureService) GetOnlineFeatures( if err != nil { return err } + defer requestDataRecords.Release() requestDataProto, err := recordToProto(requestDataRecords) if err != nil { @@ -178,6 +185,24 @@ func (s *OnlineFeatureService) GetOnlineFeatures( return err } + // Release all objects that are no longer required. 
+ for _, statusColumnBuilderToRelease := range s.statusColumnBuildersToRelease { + statusColumnBuilderToRelease.Release() + } + for _, tsColumnBuilderToRelease := range s.tsColumnBuildersToRelease { + tsColumnBuilderToRelease.Release() + } + for _, arrayToRelease := range s.arraysToRelease { + arrayToRelease.Release() + } + for _, resultsToRelease := range s.resultsToRelease { + resultsToRelease.Release() + } + s.statusColumnBuildersToRelease = nil + s.tsColumnBuildersToRelease = nil + s.arraysToRelease = nil + s.resultsToRelease = nil + outputFields := make([]arrow.Field, 0) outputColumns := make([]arrow.Array, 0) pool := memory.NewCgoArrowAllocator() @@ -210,13 +235,19 @@ func (s *OnlineFeatureService) GetOnlineFeatures( } tsColumn := tsColumnBuilder.NewArray() outputColumns = append(outputColumns, tsColumn) + + // Mark builders and arrays for release. + s.statusColumnBuildersToRelease = append(s.statusColumnBuildersToRelease, statusColumnBuilder) + s.tsColumnBuildersToRelease = append(s.tsColumnBuildersToRelease, tsColumnBuilder) + s.arraysToRelease = append(s.arraysToRelease, statusColumn) + s.arraysToRelease = append(s.arraysToRelease, tsColumn) + s.arraysToRelease = append(s.arraysToRelease, featureVector.Values) } result := array.NewRecord(arrow.NewSchema(outputFields, nil), outputColumns, int64(numRows)) + s.resultsToRelease = append(s.resultsToRelease, result) - cdata.ExportArrowRecordBatch(result, - cdata.ArrayFromPtr(output.DataPtr), - cdata.SchemaFromPtr(output.SchemaPtr)) + cdata.ExportArrowRecordBatch(result, cdata.ArrayFromPtr(output.DataPtr), cdata.SchemaFromPtr(output.SchemaPtr)) return nil } diff --git a/go/internal/feast/featurestore.go b/go/internal/feast/featurestore.go index ad1f94a4ba..ed38411460 100644 --- a/go/internal/feast/featurestore.go +++ b/go/internal/feast/featurestore.go @@ -113,7 +113,7 @@ func (fs *FeatureStore) GetOnlineFeatures( } result := make([]*onlineserving.FeatureVector, 0) - arrowMemory := memory.NewGoAllocator() + 
arrowMemory := memory.NewCgoArrowAllocator() featureViews := make([]*model.FeatureView, len(requestedFeatureViews)) index := 0 for _, featuresAndView := range requestedFeatureViews { diff --git a/go/internal/feast/onlineserving/serving.go b/go/internal/feast/onlineserving/serving.go index e2a2df923b..3c6f545153 100644 --- a/go/internal/feast/onlineserving/serving.go +++ b/go/internal/feast/onlineserving/serving.go @@ -415,6 +415,8 @@ func KeepOnlyRequestedFeatures( vectorsByName := make(map[string]*FeatureVector) expectedVectors := make([]*FeatureVector, 0) + usedVectors := make(map[string]bool) + for _, vector := range vectors { vectorsByName[vector.Name] = vector } @@ -438,6 +440,14 @@ func KeepOnlyRequestedFeatures( return nil, fmt.Errorf("requested feature %s can't be retrieved", featureRef) } expectedVectors = append(expectedVectors, vectorsByName[qualifiedName]) + usedVectors[qualifiedName] = true + } + + // Free arrow arrays for vectors that were not used. + for _, vector := range vectors { + if _, ok := usedVectors[vector.Name]; !ok { + vector.Values.Release() + } } return expectedVectors, nil diff --git a/go/internal/feast/server/logging/memorybuffer.go b/go/internal/feast/server/logging/memorybuffer.go index 9ffb0ff73b..c9f00218df 100644 --- a/go/internal/feast/server/logging/memorybuffer.go +++ b/go/internal/feast/server/logging/memorybuffer.go @@ -128,7 +128,7 @@ func getArrowSchema(schema *FeatureServiceSchema) (*arrow.Schema, error) { // and writes them to arrow table. // Returns arrow table that contains all of the logs in columnar format. 
func (b *MemoryBuffer) convertToArrowRecord() (arrow.Record, error) { - arrowMemory := memory.NewGoAllocator() + arrowMemory := memory.NewCgoArrowAllocator() numRows := len(b.logs) columns := make(map[string][]*types.Value) diff --git a/go/internal/feast/server/logging/memorybuffer_test.go b/go/internal/feast/server/logging/memorybuffer_test.go index 94f0f86ef0..ec83680f4f 100644 --- a/go/internal/feast/server/logging/memorybuffer_test.go +++ b/go/internal/feast/server/logging/memorybuffer_test.go @@ -118,7 +118,7 @@ func TestSerializeToArrowTable(t *testing.T) { LogTimestamp: time.Now(), }) - pool := memory.NewGoAllocator() + pool := memory.NewCgoArrowAllocator() builder := array.NewRecordBuilder(pool, b.arrowSchema) defer builder.Release() @@ -159,7 +159,7 @@ func TestSerializeToArrowTable(t *testing.T) { expectedRecord := builder.NewRecord() assert.Nil(t, err) for colIdx := 0; colIdx < int(record.NumCols()); colIdx++ { - assert.Equal(t, expectedRecord.Column(colIdx), record.Column(colIdx), "Columns with idx %d are not equal", colIdx) + assert.True(t, array.Equal(expectedRecord.Column(colIdx), record.Column(colIdx)), "Columns with idx %d are not equal", colIdx) } } diff --git a/sdk/python/feast/embedded_go/online_features_service.py b/sdk/python/feast/embedded_go/online_features_service.py index 3081843778..d9b34b2414 100644 --- a/sdk/python/feast/embedded_go/online_features_service.py +++ b/sdk/python/feast/embedded_go/online_features_service.py @@ -147,6 +147,7 @@ def get_online_features( features_ptr_array, features_ptr_schema ) resp = record_batch_to_online_response(record_batch) + del record_batch return OnlineResponse(resp) def start_grpc_server( From ba2dcf13fe9dc4c082816a737100e00e3e9a8ad2 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Thu, 14 Jul 2022 23:32:02 -0700 Subject: [PATCH 35/73] fix: Update gopy to point to fork to resolve github annotation errors. 
(#2940) * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- go.mod | 2 ++ go.sum | 10 ++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index ef64636e73..3c05383ffc 100644 --- a/go.mod +++ b/go.mod @@ -2,6 +2,8 @@ module github.com/feast-dev/feast go 1.17 +replace github.com/go-python/gopy v0.4.4 => github.com/feast-dev/gopy v0.4.1-0.20220714211711-252048177d85 + require ( github.com/apache/arrow/go/v8 v8.0.0 github.com/ghodss/yaml v1.0.0 diff --git a/go.sum b/go.sum index d0b7f8fcd8..11f1ba4d74 100644 --- a/go.sum +++ b/go.sum @@ -87,6 +87,14 @@ github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go. github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/feast-dev/gopy v0.4.1-0.20220714205859-591500e3215f h1:tTjEpVu4H/ZGh4wo3WETbA9dutNM6bXMXvyZbb9GLCs= +github.com/feast-dev/gopy v0.4.1-0.20220714205859-591500e3215f/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA= +github.com/feast-dev/gopy v0.4.1-0.20220714211038-aa312c13fd79 h1:oFj6GDGR8E4S5GeMyLBvaKtvMZxj3hHqsB5Xndjxjz8= +github.com/feast-dev/gopy v0.4.1-0.20220714211038-aa312c13fd79/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA= +github.com/feast-dev/gopy v0.4.1-0.20220714211330-67b016d61ed4 h1:UfzPdqqAfrt8f+jDIY61lbzqFZYsX2BhVyNcCbdpE+U= +github.com/feast-dev/gopy v0.4.1-0.20220714211330-67b016d61ed4/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA= +github.com/feast-dev/gopy v0.4.1-0.20220714211711-252048177d85 h1:BKmfqWiDbxvviB6vemPbbNjF+ywRsBMCdk1QvrcGgkc= +github.com/feast-dev/gopy v0.4.1-0.20220714211711-252048177d85/go.mod 
h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= @@ -110,8 +118,6 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= -github.com/go-python/gopy v0.4.4 h1:3LTsrfVcmg2VEM6wU+eh4d9EZn5H2iogObXjiQHrF8Q= -github.com/go-python/gopy v0.4.4/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA= github.com/go-redis/redis/v8 v8.11.4 h1:kHoYkfZP6+pe04aFTnhDH6GDROa5yJdHJVNxV3F46Tg= github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= From d25df832f5dfccac064e5022f4dd2b505a8fb677 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Fri, 15 Jul 2022 13:18:09 -0700 Subject: [PATCH 36/73] chore: More automated upgrades in repo definitions (#2941) Signed-off-by: Achal Shah --- sdk/python/feast/repo_upgrade.py | 103 +++++++++++++++++++++++++++++++ 1 file changed, 103 insertions(+) diff --git a/sdk/python/feast/repo_upgrade.py b/sdk/python/feast/repo_upgrade.py index 5c8d7433b2..6aa7a2cc1d 100644 --- a/sdk/python/feast/repo_upgrade.py +++ b/sdk/python/feast/repo_upgrade.py @@ -3,6 +3,7 @@ from typing import Dict, List from bowler import Query +from fissix.fixer_util import touch_import from fissix.pgen2 import token from fissix.pygram import python_symbols from fissix.pytree import Node @@ -30,6 +31,39 @@ def __init__(self, repo_path: str, write: bool): def 
upgrade(self): self.remove_date_partition_column() + self.rename_features_to_schema() + + def rename_inputs_to_sources(self): + def _change_argument_transform(node, capture, filename) -> None: + children = node.children + self.rename_arguments_in_children(children, {"inputs": "sources"}) + + PATTERN = """ + decorator< + any * + "on_demand_feature_view" + any * + > + """ + + Query(self.repo_files).select(PATTERN).modify( + _change_argument_transform + ).execute(write=self.write, interactive=False) + + def rename_features_to_schema(self): + Query(str(self.repo_path)).select_class("Feature").modify( + self.import_remover("Feature") + ).execute(interactive=False, write=self.write) + + def _rename_class_name( + node: Node, capture: Dict[str, Node], filename: str + ) -> None: + self.rename_class_call(node, "Field") + touch_import("feast", "Field", node) + + Query(self.repo_files).select_class("Feature").is_call().modify( + _rename_class_name + ).execute(write=self.write, interactive=False) def remove_date_partition_column(self): def _remove_date_partition_column( @@ -42,6 +76,52 @@ def _remove_date_partition_column( _remove_date_partition_column ).execute(write=self.write, interactive=False) + @staticmethod + def rename_arguments_in_children( + children: List[Node], renames: Dict[str, str] + ) -> None: + """ + Renames the arguments in the children list of a node by searching for the + argument list or trailing list and renaming all keys in `renames` dict to + corresponding value. 
+ """ + for child in children: + if not isinstance(child, Node): + continue + if ( + child.type == python_symbols.arglist + or child.type == python_symbols.trailer + ): + if not child.children: + continue + for _, child in enumerate(child.children): + if not isinstance(child, Node): + continue + else: + if child.type == python_symbols.argument: + if child.children[0].value in renames: + child.children[0].value = renames[ + child.children[0].value + ] + + @staticmethod + def rename_class_call(node: Node, new_class_name: str): + """ + Rename the class being instantiated. + f = Feature( + name="driver_id", + join_key="driver_id", + ) + into + f = Field( + name="driver_id", + ) + This method assumes that node represents a class call that already has an arglist. + """ + if len(node.children) < 2 or len(node.children[1].children) < 2: + raise ValueError(f"Expected a class call with an arglist but got {node}.") + node.children[0].value = new_class_name + @staticmethod def remove_argument_transform(node: Node, argument: str): """ @@ -70,3 +150,26 @@ def remove_argument_transform(node: Node, argument: str): class_args.pop(i) if i < len(class_args) and class_args[i].type == token.NEWLINE: class_args.pop(i) + + @staticmethod + def import_remover(class_name): + def remove_import_transformer(node, capture, filename): + if "class_import" in capture and capture["class_name"].value == class_name: + if capture["class_import"].type == python_symbols.import_from: + import_from_stmt = node.children + imported_classes = import_from_stmt[3] + + if len(imported_classes.children) > 1: + # something of the form `from feast import A, ValueType` + for i, class_leaf in enumerate(imported_classes.children): + if class_leaf.value == class_name: + imported_classes.children.pop(i) + if i == len(imported_classes.children): + imported_classes.children.pop(i - 1) + else: + imported_classes.children.pop(i) + else: + # something of the form `from feast import ValueType` + 
node.parent.children.remove(node) + + return remove_import_transformer From 476fccd322c684f44fd17c942341e4d0082c8526 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Jul 2022 21:29:09 -0700 Subject: [PATCH 37/73] chore(deps): Bump aws-java-sdk-s3 from 1.12.110 to 1.12.261 in /java/serving (#2947) chore(deps): Bump aws-java-sdk-s3 in /java/serving Bumps [aws-java-sdk-s3](https://github.com/aws/aws-sdk-java) from 1.12.110 to 1.12.261. - [Release notes](https://github.com/aws/aws-sdk-java/releases) - [Changelog](https://github.com/aws/aws-sdk-java/blob/master/CHANGELOG.md) - [Commits](https://github.com/aws/aws-sdk-java/compare/1.12.110...1.12.261) --- updated-dependencies: - dependency-name: com.amazonaws:aws-java-sdk-s3 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- java/serving/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/java/serving/pom.xml b/java/serving/pom.xml index 47a636d7d5..f173cdd5fe 100644 --- a/java/serving/pom.xml +++ b/java/serving/pom.xml @@ -235,7 +235,7 @@ com.amazonaws aws-java-sdk-s3 - 1.12.110 + 1.12.261 From d593351e08051c3d389808528ce443cb0471fac4 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Mon, 18 Jul 2022 16:11:38 -0400 Subject: [PATCH 38/73] chore: Add project metadata to registry (#2938) * chore: Add project metadata to registry Signed-off-by: Danny Chiao * lint Signed-off-by: Danny Chiao * add to file registry Signed-off-by: Danny Chiao * add to sql registry Signed-off-by: Danny Chiao * fixes Signed-off-by: Danny Chiao * fixes Signed-off-by: Danny Chiao * fixes Signed-off-by: Danny Chiao * fix Signed-off-by: Danny Chiao * lint Signed-off-by: Danny Chiao * fix test Signed-off-by: Danny Chiao * add tests Signed-off-by: Danny Chiao * make backwards compatible Signed-off-by: Danny Chiao * update makefile for passing local tests 
Signed-off-by: Danny Chiao * update makefile for passing local tests Signed-off-by: Danny Chiao * update makefile for passing local tests Signed-off-by: Danny Chiao * update makefile for passing local tests Signed-off-by: Danny Chiao --- .../workflows/pr_local_integration_tests.yml | 17 ++ Makefile | 3 +- protos/feast/core/Registry.proto | 8 +- .../source/feast.infra.materialization.rst | 29 +++ sdk/python/docs/source/feast.infra.rst | 1 + sdk/python/docs/source/feast.rst | 16 ++ sdk/python/feast/driver_test_data.py | 2 +- sdk/python/feast/feature_store.py | 6 +- sdk/python/feast/infra/registry_stores/sql.py | 73 ++++++- sdk/python/feast/project_metadata.py | 111 ++++++++++ sdk/python/feast/registry.py | 192 ++++++++++++++---- sdk/python/feast/templates/gcp/driver_repo.py | 1 + sdk/python/feast/templates/hbase/example.py | 1 + sdk/python/feast/templates/local/example.py | 1 + sdk/python/feast/usage.py | 5 + .../feature_repos/repo_configuration.py | 2 +- .../integration/registration/test_registry.py | 36 +++- .../registration/test_sql_registry.py | 14 ++ 18 files changed, 450 insertions(+), 68 deletions(-) create mode 100644 sdk/python/docs/source/feast.infra.materialization.rst create mode 100644 sdk/python/feast/project_metadata.py diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 0847083393..4c87780888 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -67,6 +67,23 @@ jobs: sudo apt install -y -V libarrow-dev - name: Install dependencies run: make install-python-ci-dependencies + - name: Set up gcloud SDK # TODO(adchia): remove this dependency + uses: google-github-actions/setup-gcloud@v0 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: Use gcloud CLI + run: gcloud info - name: Test local integration tests if: ${{ always() }} # this 
will guarantee that step won't be canceled and resources won't leak run: make test-python-integration-local + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ./coverage.xml + flags: localintegrationtests + env_vars: OS,PYTHON + fail_ci_if_error: true + verbose: true diff --git a/Makefile b/Makefile index 6d733ac61f..915ac907f7 100644 --- a/Makefile +++ b/Makefile @@ -77,7 +77,8 @@ test-python-integration-local: python -m pytest -n 8 --integration \ -k "not test_apply_entity_integration and \ not test_apply_feature_view_integration and \ - not test_apply_data_source_integration" \ + not test_apply_data_source_integration and \ + not test_lambda_materialization" \ sdk/python/tests \ ) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!"; diff --git a/protos/feast/core/Registry.proto b/protos/feast/core/Registry.proto index 19f17a8158..7d80d8c837 100644 --- a/protos/feast/core/Registry.proto +++ b/protos/feast/core/Registry.proto @@ -34,7 +34,7 @@ import "feast/core/SavedDataset.proto"; import "feast/core/ValidationProfile.proto"; import "google/protobuf/timestamp.proto"; -// Next id: 15 +// Next id: 16 message Registry { repeated Entity entities = 1; repeated FeatureTable feature_tables = 2; @@ -47,9 +47,15 @@ message Registry { repeated SavedDataset saved_datasets = 11; repeated ValidationReference validation_references = 13; Infra infra = 10; + // Tracking metadata of Feast by project + repeated ProjectMetadata project_metadata = 15; string registry_schema_version = 3; // to support migrations; incremented when schema is changed string version_id = 4; // version id, random string generated on each update of the data; now used only for debugging purposes google.protobuf.Timestamp last_updated = 5; +} +message ProjectMetadata { + string project = 1; + string project_uuid = 2; } diff --git 
a/sdk/python/docs/source/feast.infra.materialization.rst b/sdk/python/docs/source/feast.infra.materialization.rst new file mode 100644 index 0000000000..49fdc404cb --- /dev/null +++ b/sdk/python/docs/source/feast.infra.materialization.rst @@ -0,0 +1,29 @@ +feast.infra.materialization package +=================================== + +Submodules +---------- + +feast.infra.materialization.batch\_materialization\_engine module +----------------------------------------------------------------- + +.. automodule:: feast.infra.materialization.batch_materialization_engine + :members: + :undoc-members: + :show-inheritance: + +feast.infra.materialization.local\_engine module +------------------------------------------------ + +.. automodule:: feast.infra.materialization.local_engine + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.materialization + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.rst b/sdk/python/docs/source/feast.infra.rst index ec2cc120a6..42c7d1334b 100644 --- a/sdk/python/docs/source/feast.infra.rst +++ b/sdk/python/docs/source/feast.infra.rst @@ -7,6 +7,7 @@ Subpackages .. toctree:: :maxdepth: 4 + feast.infra.materialization feast.infra.offline_stores feast.infra.online_stores feast.infra.registry_stores diff --git a/sdk/python/docs/source/feast.rst b/sdk/python/docs/source/feast.rst index 7c569fc713..c000ac2e2b 100644 --- a/sdk/python/docs/source/feast.rst +++ b/sdk/python/docs/source/feast.rst @@ -225,6 +225,14 @@ feast.online\_response module :undoc-members: :show-inheritance: +feast.project\_metadata module +------------------------------ + +.. automodule:: feast.project_metadata + :members: + :undoc-members: + :show-inheritance: + feast.proto\_json module ------------------------ @@ -273,6 +281,14 @@ feast.repo\_operations module :undoc-members: :show-inheritance: +feast.repo\_upgrade module +-------------------------- + +.. 
automodule:: feast.repo_upgrade + :members: + :undoc-members: + :show-inheritance: + feast.request\_feature\_view module ----------------------------------- diff --git a/sdk/python/feast/driver_test_data.py b/sdk/python/feast/driver_test_data.py index 117bfcbd9c..991b5391e8 100644 --- a/sdk/python/feast/driver_test_data.py +++ b/sdk/python/feast/driver_test_data.py @@ -98,7 +98,7 @@ def create_driver_hourly_stats_df(drivers, start_date, end_date) -> pd.DataFrame "event_timestamp": [ pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") for dt in pd.date_range( - start=start_date, end=end_date, freq="1H", closed="left" + start=start_date, end=end_date, freq="1H", inclusive="left" ) ] # include a fixed timestamp for get_historical_features in the quickstart diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index e3c1251dad..1eeb8c22d3 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -142,7 +142,7 @@ def __init__( self._registry = SqlRegistry(registry_config, None) else: r = Registry(registry_config, repo_path=self.repo_path) - r._initialize_registry() + r._initialize_registry(self.config.project) self._registry = r self._provider = get_provider(self.config, self.repo_path) self._go_server = None @@ -183,7 +183,7 @@ def refresh_registry(self): """ registry_config = self.config.get_registry_config() registry = Registry(registry_config, repo_path=self.repo_path) - registry.refresh() + registry.refresh(self.config.project) self._registry = registry @@ -704,7 +704,7 @@ def plan( # Compute the desired difference between the current infra, as stored in the registry, # and the desired infra. 
- self._registry.refresh() + self._registry.refresh(self.project) current_infra_proto = self._registry.proto().infra.__deepcopy__() desired_registry_proto = desired_repo_contents.to_registry_proto() new_infra = self._provider.plan_infra(self.config, desired_registry_proto) diff --git a/sdk/python/feast/infra/registry_stores/sql.py b/sdk/python/feast/infra/registry_stores/sql.py index 7ea4a96849..2d3ac9d683 100644 --- a/sdk/python/feast/infra/registry_stores/sql.py +++ b/sdk/python/feast/infra/registry_stores/sql.py @@ -1,4 +1,6 @@ +import uuid from datetime import datetime +from enum import Enum from pathlib import Path from typing import Any, List, Optional, Set, Union @@ -17,6 +19,7 @@ ) from sqlalchemy.engine import Engine +from feast import usage from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity @@ -32,6 +35,7 @@ from feast.feature_view import FeatureView from feast.infra.infra_object import Infra from feast.on_demand_feature_view import OnDemandFeatureView +from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.FeatureService_pb2 import ( @@ -156,6 +160,12 @@ Column("infra_proto", LargeBinary, nullable=False), ) + +class FeastMetadataKeys(Enum): + LAST_UPDATED_TIMESTAMP = "last_updated_timestamp" + PROJECT_UUID = "project_uuid" + + feast_metadata = Table( "feast_metadata", metadata, @@ -189,7 +199,7 @@ def teardown(self): stmt = delete(t) conn.execute(stmt) - def refresh(self): + def refresh(self, project: Optional[str]): # This method is a no-op since we're always reading the latest values from the db. 
pass @@ -459,6 +469,22 @@ def list_on_demand_feature_views( "feature_view_proto", ) + def list_project_metadata( + self, project: str, allow_cache: bool = False + ) -> List[ProjectMetadata]: + with self.engine.connect() as conn: + stmt = select(feast_metadata).where(feast_metadata.c.project_id == project,) + rows = conn.execute(stmt).all() + if rows: + project_metadata = ProjectMetadata(project_name=project) + for row in rows: + if row["metadata_key"] == FeastMetadataKeys.PROJECT_UUID.value: + project_metadata.project_uuid = row["metadata_value"] + break + # TODO(adchia): Add other project metadata in a structured way + return [project_metadata] + return [] + def apply_saved_dataset( self, saved_dataset: SavedDataset, project: str, commit: bool = True, ): @@ -629,6 +655,7 @@ def proto(self) -> RegistryProto: (self.list_feature_services, r.feature_services), (self.list_saved_datasets, r.saved_datasets), (self.list_validation_references, r.validation_references), + (self.list_project_metadata, r.project_metadata), ]: objs: List[Any] = lister(project) # type: ignore if objs: @@ -651,14 +678,16 @@ def commit(self): def _apply_object( self, table, project: str, id_field_name, obj, proto_field_name, name=None ): + self._maybe_init_project_metadata(project) + name = name or obj.name with self.engine.connect() as conn: + update_datetime = datetime.utcnow() + update_time = int(update_datetime.timestamp()) stmt = select(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project ) row = conn.execute(stmt).first() - update_datetime = datetime.utcnow() - update_time = int(update_datetime.timestamp()) if hasattr(obj, "last_updated_timestamp"): obj.last_updated_timestamp = update_datetime @@ -685,6 +714,30 @@ def _apply_object( self._set_last_updated_metadata(update_datetime, project) + def _maybe_init_project_metadata(self, project): + # Initialize project metadata if needed + with self.engine.connect() as conn: + update_datetime = datetime.utcnow() + 
update_time = int(update_datetime.timestamp()) + stmt = select(feast_metadata).where( + feast_metadata.c.metadata_key == FeastMetadataKeys.PROJECT_UUID.value, + feast_metadata.c.project_id == project, + ) + row = conn.execute(stmt).first() + if row: + usage.set_current_project_uuid(row["metadata_value"]) + else: + new_project_uuid = f"{uuid.uuid4()}" + values = { + "metadata_key": FeastMetadataKeys.PROJECT_UUID.value, + "metadata_value": new_project_uuid, + "last_updated_timestamp": update_time, + "project_id": project, + } + insert_stmt = insert(feast_metadata).values(values) + conn.execute(insert_stmt) + usage.set_current_project_uuid(new_project_uuid) + def _delete_object(self, table, name, project, id_field_name, not_found_exception): with self.engine.connect() as conn: stmt = delete(table).where( @@ -708,6 +761,8 @@ def _get_object( proto_field_name, not_found_exception, ): + self._maybe_init_project_metadata(project) + with self.engine.connect() as conn: stmt = select(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project @@ -721,6 +776,7 @@ def _get_object( def _list_objects( self, table, project, proto_class, python_class, proto_field_name ): + self._maybe_init_project_metadata(project) with self.engine.connect() as conn: stmt = select(table).where(table.c.project_id == project) rows = conn.execute(stmt).all() @@ -736,7 +792,8 @@ def _list_objects( def _set_last_updated_metadata(self, last_updated: datetime, project: str): with self.engine.connect() as conn: stmt = select(feast_metadata).where( - feast_metadata.c.metadata_key == "last_updated_timestamp", + feast_metadata.c.metadata_key + == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, feast_metadata.c.project_id == project, ) row = conn.execute(stmt).first() @@ -744,7 +801,7 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): update_time = int(last_updated.timestamp()) values = { - "metadata_key": "last_updated_timestamp", + "metadata_key": 
FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, "metadata_value": f"{update_time}", "last_updated_timestamp": update_time, "project_id": project, @@ -753,7 +810,8 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): update_stmt = ( update(feast_metadata) .where( - feast_metadata.c.metadata_key == "last_updated_timestamp", + feast_metadata.c.metadata_key + == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, feast_metadata.c.project_id == project, ) .values(values) @@ -766,7 +824,8 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): def _get_last_updated_metadata(self, project: str): with self.engine.connect() as conn: stmt = select(feast_metadata).where( - feast_metadata.c.metadata_key == "last_updated_timestamp", + feast_metadata.c.metadata_key + == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, feast_metadata.c.project_id == project, ) row = conn.execute(stmt).first() diff --git a/sdk/python/feast/project_metadata.py b/sdk/python/feast/project_metadata.py new file mode 100644 index 0000000000..829e9ff0d5 --- /dev/null +++ b/sdk/python/feast/project_metadata.py @@ -0,0 +1,111 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import uuid +from typing import Optional + +from google.protobuf.json_format import MessageToJson +from typeguard import typechecked + +from feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto +from feast.usage import log_exceptions + + +@typechecked +class ProjectMetadata: + """ + Tracks project level metadata + + Attributes: + project_name: The registry-scoped unique name of the project. + project_uuid: The UUID for this project + """ + + project_name: str + project_uuid: str + + @log_exceptions + def __init__( + self, + *args, + project_name: Optional[str] = None, + project_uuid: Optional[str] = None, + ): + """ + Creates an Project metadata object. + + Args: + project_name: The registry-scoped unique name of the project. + project_uuid: The UUID for this project + + Raises: + ValueError: Parameters are specified incorrectly. + """ + if not project_name: + raise ValueError("Project name needs to be specified") + + self.project_name = project_name + self.project_uuid = project_uuid or f"{uuid.uuid4()}" + + def __hash__(self) -> int: + return hash((self.project_name, self.project_uuid)) + + def __eq__(self, other): + if not isinstance(other, ProjectMetadata): + raise TypeError( + "Comparisons should only involve ProjectMetadata class objects." + ) + + if ( + self.project_name != other.project_name + or self.project_uuid != other.project_uuid + ): + return False + + return True + + def __str__(self): + return str(MessageToJson(self.to_proto())) + + def __lt__(self, other): + return self.project_name < other.project_name + + @classmethod + def from_proto(cls, project_metadata_proto: ProjectMetadataProto): + """ + Creates project metadata from a protobuf representation. + + Args: + project_metadata_proto: A protobuf representation of project metadata. + + Returns: + A ProjectMetadata object based on the protobuf. 
+ """ + entity = cls( + project_name=project_metadata_proto.project, + project_uuid=project_metadata_proto.project_uuid, + ) + + return entity + + def to_proto(self) -> ProjectMetadataProto: + """ + Converts a project metadata object to its protobuf representation. + + Returns: + An ProjectMetadataProto protobuf. + """ + + return ProjectMetadataProto( + project=self.project_name, project_uuid=self.project_uuid + ) diff --git a/sdk/python/feast/registry.py b/sdk/python/feast/registry.py index c721bd648a..f72fd717d2 100644 --- a/sdk/python/feast/registry.py +++ b/sdk/python/feast/registry.py @@ -14,6 +14,7 @@ import abc import json import logging +import uuid from abc import abstractmethod from collections import defaultdict from datetime import datetime, timedelta @@ -28,6 +29,7 @@ from google.protobuf.json_format import MessageToJson from proto import Message +from feast import usage from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity @@ -47,6 +49,8 @@ from feast.importer import import_class from feast.infra.infra_object import Infra from feast.on_demand_feature_view import OnDemandFeatureView +from feast.project_metadata import ProjectMetadata +from feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.registry_store import NoopRegistryStore from feast.repo_config import RegistryConfig @@ -57,7 +61,6 @@ REGISTRY_SCHEMA_VERSION = "1" - REGISTRY_STORE_CLASS_FOR_TYPE = { "GCSRegistryStore": "feast.infra.gcp.GCSRegistryStore", "S3RegistryStore": "feast.infra.aws.S3RegistryStore", @@ -121,7 +124,6 @@ def get_objects_from_repo_contents( FEAST_OBJECT_TYPES = [feast_object_type for feast_object_type in FeastObjectType] - logger = logging.getLogger(__name__) @@ -607,6 +609,20 @@ def list_validation_references( List of request feature views """ + def list_project_metadata( + self, 
project: str, allow_cache: bool = False + ) -> List[ProjectMetadata]: + """ + Retrieves project metadata + + Args: + project: Filter metadata based on project name + allow_cache: Allow returning feature views from the cached registry + + Returns: + List of project metadata + """ + @abstractmethod def update_infra(self, infra: Infra, project: str, commit: bool = True): """ @@ -660,7 +676,7 @@ def commit(self): """Commits the state of the registry cache to the remote registry store.""" @abstractmethod - def refresh(self): + def refresh(self, project: Optional[str]): """Refreshes the state of the registry cache by fetching the registry state from the remote registry store.""" @staticmethod @@ -678,6 +694,10 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: """ registry_dict: Dict[str, Any] = defaultdict(list) registry_dict["project"] = project + for project_metadata in sorted(self.list_project_metadata(project=project)): + registry_dict["projectMetadata"].append( + self._message_to_sorted_dict(project_metadata.to_proto()) + ) for data_source in sorted( self.list_data_sources(project=project), key=lambda ds: ds.name ): @@ -733,6 +753,25 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: return registry_dict +def _get_project_metadata( + registry_proto: Optional[RegistryProto], project: str +) -> Optional[ProjectMetadataProto]: + if not registry_proto: + return None + for pm in registry_proto.project_metadata: + if pm.project == project: + return pm + return None + + +def _init_project_metadata(cached_registry_proto: RegistryProto, project: str): + new_project_uuid = f"{uuid.uuid4()}" + usage.set_current_project_uuid(new_project_uuid) + cached_registry_proto.project_metadata.append( + ProjectMetadata(project_name=project, project_uuid=new_project_uuid).to_proto() + ) + + class Registry(BaseRegistry): """ Registry: A registry allows for the management and persistence of feature definitions and related metadata. 
@@ -811,13 +850,14 @@ def clone(self) -> "Registry": new_registry._registry_store = NoopRegistryStore() return new_registry - def _initialize_registry(self): + def _initialize_registry(self, project: str): """Explicitly initializes the registry with an empty proto if it doesn't exist.""" try: - self._get_registry_proto() + self._get_registry_proto(project=project) except FileNotFoundError: registry_proto = RegistryProto() registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION + _init_project_metadata(registry_proto, project) self._registry_store.update_registry_proto(registry_proto) def update_infra(self, infra: Infra, project: str, commit: bool = True): @@ -829,7 +869,7 @@ def update_infra(self, infra: Infra, project: str, commit: bool = True): project: Feast project that the Infra object refers to commit: Whether the change should be persisted immediately """ - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto self.cached_registry_proto.infra.CopyFrom(infra.to_proto()) @@ -847,7 +887,9 @@ def get_infra(self, project: str, allow_cache: bool = False) -> Infra: Returns: The stored Infra object. 
""" - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) return Infra.from_proto(registry_proto.infra) def apply_entity(self, entity: Entity, project: str, commit: bool = True): @@ -868,7 +910,7 @@ def apply_entity(self, entity: Entity, project: str, commit: bool = True): entity_proto = entity.to_proto() entity_proto.spec.project = project - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto for idx, existing_entity_proto in enumerate( @@ -896,7 +938,9 @@ def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity] Returns: List of entities """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) entities = [] for entity_proto in registry_proto.entities: if entity_proto.spec.project == project: @@ -916,7 +960,9 @@ def list_data_sources( Returns: List of data sources """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) data_sources = [] for data_source_proto in registry_proto.data_sources: if data_source_proto.project == project: @@ -934,7 +980,7 @@ def apply_data_source( project: Feast project that this data source belongs to commit: Whether to immediately commit to the registry """ - registry = self._prepare_registry_for_changes() + registry = self._prepare_registry_for_changes(project) for idx, existing_data_source_proto in enumerate(registry.data_sources): if existing_data_source_proto.name == data_source.name: del registry.data_sources[idx] @@ -959,7 +1005,7 @@ def delete_data_source(self, name: str, project: str, commit: bool = True): project: Feast project that this data source belongs to commit: Whether the change should be persisted immediately """ - 
self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto for idx, data_source_proto in enumerate( @@ -990,7 +1036,7 @@ def apply_feature_service( feature_service_proto = feature_service.to_proto() feature_service_proto.spec.project = project - registry = self._prepare_registry_for_changes() + registry = self._prepare_registry_for_changes(project) for idx, existing_feature_service_proto in enumerate(registry.feature_services): if ( @@ -1017,7 +1063,7 @@ def list_feature_services( List of feature services """ - registry = self._get_registry_proto(allow_cache=allow_cache) + registry = self._get_registry_proto(project=project, allow_cache=allow_cache) feature_services = [] for feature_service_proto in registry.feature_services: if feature_service_proto.spec.project == project: @@ -1041,7 +1087,7 @@ def get_feature_service( Returns either the specified feature service, or raises an exception if none is found """ - registry = self._get_registry_proto(allow_cache=allow_cache) + registry = self._get_registry_proto(project=project, allow_cache=allow_cache) for feature_service_proto in registry.feature_services: if ( @@ -1064,7 +1110,9 @@ def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Enti Returns either the specified entity, or raises an exception if none is found """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) for entity_proto in registry_proto.entities: if entity_proto.spec.name == name and entity_proto.spec.project == project: return Entity.from_proto(entity_proto) @@ -1090,7 +1138,7 @@ def apply_feature_view( feature_view_proto = feature_view.to_proto() feature_view_proto.spec.project = project - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto self._check_conflicting_feature_view_names(feature_view) 
@@ -1146,7 +1194,7 @@ def list_stream_feature_views( Returns: List of stream feature views """ - registry = self._get_registry_proto(allow_cache=allow_cache) + registry = self._get_registry_proto(project=project, allow_cache=allow_cache) stream_feature_views = [] for stream_feature_view in registry.stream_feature_views: if stream_feature_view.spec.project == project: @@ -1169,7 +1217,7 @@ def list_on_demand_feature_views( List of on demand feature views """ - registry = self._get_registry_proto(allow_cache=allow_cache) + registry = self._get_registry_proto(project=project, allow_cache=allow_cache) on_demand_feature_views = [] for on_demand_feature_view in registry.on_demand_feature_views: if on_demand_feature_view.spec.project == project: @@ -1193,7 +1241,7 @@ def get_on_demand_feature_view( Returns either the specified on demand feature view, or raises an exception if none is found """ - registry = self._get_registry_proto(allow_cache=allow_cache) + registry = self._get_registry_proto(project=project, allow_cache=allow_cache) for on_demand_feature_view in registry.on_demand_feature_views: if ( @@ -1217,7 +1265,7 @@ def get_data_source( Returns: Returns either the specified data source, or raises an exception if none is found """ - registry = self._get_registry_proto(allow_cache=allow_cache) + registry = self._get_registry_proto(project=project, allow_cache=allow_cache) for data_source in registry.data_sources: if data_source.project == project and data_source.name == name: @@ -1242,7 +1290,7 @@ def apply_materialization( end_date (datetime): End date of the materialization interval to track commit: Whether the change should be persisted immediately """ - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto for idx, existing_feature_view_proto in enumerate( @@ -1306,7 +1354,9 @@ def list_feature_views( Returns: List of feature views """ - registry_proto = 
self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) feature_views: List[FeatureView] = [] for feature_view_proto in registry_proto.feature_views: if feature_view_proto.spec.project == project: @@ -1320,13 +1370,12 @@ def get_request_feature_view(self, name: str, project: str): Args: name: Name of feature view project: Feast project that this feature view belongs to - allow_cache: Allow returning feature view from the cached registry Returns: Returns either the specified feature view, or raises an exception if none is found """ - registry_proto = self._get_registry_proto(allow_cache=False) + registry_proto = self._get_registry_proto(project=project, allow_cache=False) for feature_view_proto in registry_proto.feature_views: if ( feature_view_proto.spec.name == name @@ -1348,7 +1397,9 @@ def list_request_feature_views( Returns: List of feature views """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) feature_views: List[RequestFeatureView] = [] for request_feature_view_proto in registry_proto.request_feature_views: if request_feature_view_proto.spec.project == project: @@ -1372,7 +1423,9 @@ def get_feature_view( Returns either the specified feature view, or raises an exception if none is found """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) for feature_view_proto in registry_proto.feature_views: if ( feature_view_proto.spec.name == name @@ -1396,7 +1449,9 @@ def get_stream_feature_view( Returns either the specified feature view, or raises an exception if none is found """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) for feature_view_proto 
in registry_proto.stream_feature_views: if ( feature_view_proto.spec.name == name @@ -1414,7 +1469,7 @@ def delete_feature_service(self, name: str, project: str, commit: bool = True): project: Feast project that this feature service belongs to commit: Whether the change should be persisted immediately """ - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto for idx, feature_service_proto in enumerate( @@ -1439,7 +1494,7 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): project: Feast project that this feature view belongs to commit: Whether the change should be persisted immediately """ - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto for idx, existing_feature_view_proto in enumerate( @@ -1501,7 +1556,7 @@ def delete_entity(self, name: str, project: str, commit: bool = True): project: Feast project that this entity belongs to commit: Whether the change should be persisted immediately """ - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto for idx, existing_entity_proto in enumerate( @@ -1536,7 +1591,7 @@ def apply_saved_dataset( saved_dataset_proto = saved_dataset.to_proto() saved_dataset_proto.spec.project = project - self._prepare_registry_for_changes() + self._prepare_registry_for_changes(project) assert self.cached_registry_proto for idx, existing_saved_dataset_proto in enumerate( @@ -1568,7 +1623,9 @@ def get_saved_dataset( Returns either the specified SavedDataset, or raises an exception if none is found """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) for saved_dataset in registry_proto.saved_datasets: if ( saved_dataset.spec.name == name @@ -1590,7 +1647,9 @@ def list_saved_datasets( Returns: Returns the list of 
SavedDatasets """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) return [ SavedDataset.from_proto(saved_dataset) for saved_dataset in registry_proto.saved_datasets @@ -1614,7 +1673,7 @@ def apply_validation_reference( validation_reference_proto = validation_reference.to_proto() validation_reference_proto.project = project - registry_proto = self._prepare_registry_for_changes() + registry_proto = self._prepare_registry_for_changes(project) for idx, existing_validation_reference in enumerate( registry_proto.validation_references ): @@ -1644,7 +1703,9 @@ def get_validation_reference( Returns either the specified ValidationReference, or raises an exception if none is found """ - registry_proto = self._get_registry_proto(allow_cache=allow_cache) + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) for validation_reference in registry_proto.validation_references: if ( validation_reference.name == name @@ -1662,7 +1723,7 @@ def delete_validation_reference(self, name: str, project: str, commit: bool = Tr project: Feast project that this object belongs to commit: Whether the change should be persisted immediately """ - registry_proto = self._prepare_registry_for_changes() + registry_proto = self._prepare_registry_for_changes(project) for idx, existing_validation_reference in enumerate( registry_proto.validation_references ): @@ -1676,14 +1737,26 @@ def delete_validation_reference(self, name: str, project: str, commit: bool = Tr return raise ValidationReferenceNotFound(name, project=project) + def list_project_metadata( + self, project: str, allow_cache: bool = False + ) -> List[ProjectMetadata]: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return [ + ProjectMetadata.from_proto(project_metadata) + for project_metadata in registry_proto.project_metadata + if 
project_metadata.project == project + ] + def commit(self): """Commits the state of the registry cache to the remote registry store.""" if self.cached_registry_proto: self._registry_store.update_registry_proto(self.cached_registry_proto) - def refresh(self): + def refresh(self, project: Optional[str]): """Refreshes the state of the registry cache by fetching the registry state from the remote registry store.""" - self._get_registry_proto(allow_cache=False) + self._get_registry_proto(project=project, allow_cache=False) def teardown(self): """Tears down (removes) the registry.""" @@ -1692,21 +1765,34 @@ def teardown(self): def proto(self) -> RegistryProto: return self.cached_registry_proto or RegistryProto() - def _prepare_registry_for_changes(self): + def _prepare_registry_for_changes(self, project: str): """Prepares the Registry for changes by refreshing the cache if necessary.""" try: - self._get_registry_proto(allow_cache=True) + self._get_registry_proto(project=project, allow_cache=True) + if _get_project_metadata(self.cached_registry_proto, project) is None: + # Project metadata not initialized yet. 
Try pulling without cache + self._get_registry_proto(project=project, allow_cache=False) except FileNotFoundError: registry_proto = RegistryProto() registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION self.cached_registry_proto = registry_proto self.cached_registry_proto_created = datetime.utcnow() + + # Initialize project metadata if needed + assert self.cached_registry_proto + if _get_project_metadata(self.cached_registry_proto, project) is None: + _init_project_metadata(self.cached_registry_proto, project) + self.commit() + return self.cached_registry_proto - def _get_registry_proto(self, allow_cache: bool = False) -> RegistryProto: + def _get_registry_proto( + self, project: Optional[str], allow_cache: bool = False + ) -> RegistryProto: """Returns the cached or remote registry state Args: + project: Name of the Feast project (optional) allow_cache: Whether to allow the use of the registry cache when fetching the RegistryProto Returns: Returns a RegistryProto object which represents the state of the registry @@ -1727,7 +1813,15 @@ def _get_registry_proto(self, allow_cache: bool = False) -> RegistryProto: ) ) - if allow_cache and not expired: + if project: + old_project_metadata = _get_project_metadata( + registry_proto=self.cached_registry_proto, project=project + ) + + if allow_cache and not expired and old_project_metadata is not None: + assert isinstance(self.cached_registry_proto, RegistryProto) + return self.cached_registry_proto + elif allow_cache and not expired: assert isinstance(self.cached_registry_proto, RegistryProto) return self.cached_registry_proto @@ -1735,6 +1829,18 @@ def _get_registry_proto(self, allow_cache: bool = False) -> RegistryProto: self.cached_registry_proto = registry_proto self.cached_registry_proto_created = datetime.utcnow() + if not project: + return registry_proto + + project_metadata = _get_project_metadata( + registry_proto=registry_proto, project=project + ) + if project_metadata: + 
usage.set_current_project_uuid(project_metadata.project_uuid) + else: + _init_project_metadata(registry_proto, project) + self.commit() + return registry_proto def _check_conflicting_feature_view_names(self, feature_view: BaseFeatureView): diff --git a/sdk/python/feast/templates/gcp/driver_repo.py b/sdk/python/feast/templates/gcp/driver_repo.py index acb17d5519..6c904a0fee 100644 --- a/sdk/python/feast/templates/gcp/driver_repo.py +++ b/sdk/python/feast/templates/gcp/driver_repo.py @@ -18,6 +18,7 @@ # Indicates a data source from which feature values can be retrieved. Sources are queried when building training # datasets or materializing features into an online store. driver_stats_source = BigQuerySource( + name="driver_hourly_stats_source", # The BigQuery table where features can be found table="feast-oss.demo_data.driver_hourly_stats_2", # The event timestamp is used for point-in-time joins and for ensuring only diff --git a/sdk/python/feast/templates/hbase/example.py b/sdk/python/feast/templates/hbase/example.py index b34696185b..6845371f1f 100644 --- a/sdk/python/feast/templates/hbase/example.py +++ b/sdk/python/feast/templates/hbase/example.py @@ -9,6 +9,7 @@ # production, you can use your favorite DWH, such as BigQuery. See Feast documentation # for more info. driver_hourly_stats = FileSource( + name="driver_hourly_stats_source", path="%PARQUET_PATH%", timestamp_field="event_timestamp", created_timestamp_column="created", diff --git a/sdk/python/feast/templates/local/example.py b/sdk/python/feast/templates/local/example.py index 30f9adf189..4fd30ba3a1 100644 --- a/sdk/python/feast/templates/local/example.py +++ b/sdk/python/feast/templates/local/example.py @@ -9,6 +9,7 @@ # production, you can use your favorite DWH, such as BigQuery. See Feast documentation # for more info. 
driver_hourly_stats = FileSource( + name="driver_hourly_stats_source", path="%PARQUET_PATH%", timestamp_field="event_timestamp", created_timestamp_column="created", diff --git a/sdk/python/feast/usage.py b/sdk/python/feast/usage.py index 90b659479d..471a1b9671 100644 --- a/sdk/python/feast/usage.py +++ b/sdk/python/feast/usage.py @@ -40,6 +40,7 @@ _is_enabled = os.getenv(FEAST_USAGE, default=DEFAULT_FEAST_USAGE_VALUE) == "True" _constant_attributes = { + "project_id": "", "session_id": str(uuid.uuid4()), "installation_id": None, "version": get_version(), @@ -53,6 +54,10 @@ } +def set_current_project_uuid(project_uuid: str): + _constant_attributes["project_id"] = project_uuid + + @dataclasses.dataclass class FnCall: fn_name: str diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 4300ca64b6..6342544417 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -428,7 +428,7 @@ def construct_test_environment( fs = FeatureStore(repo_dir_name) # We need to initialize the registry, because if nothing is applied in the test before tearing down # the feature store, that will cause the teardown method to blow up. 
- fs.registry._initialize_registry() + fs.registry._initialize_registry(project) environment = Environment( name=project, test_repo_config=test_repo_config, diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index ac7696f6e7..27bbbbd2bb 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -110,7 +110,7 @@ def test_apply_entity_success(test_registry): # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) @pytest.mark.integration @@ -150,7 +150,7 @@ def test_apply_entity_integration(test_registry): # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) @pytest.mark.parametrize( @@ -225,7 +225,7 @@ def test_apply_feature_view_success(test_registry): # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) @pytest.mark.parametrize( @@ -299,7 +299,7 @@ def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) @pytest.mark.parametrize( @@ -364,7 +364,7 @@ def simple_udf(x: int): # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) @pytest.mark.parametrize( @@ -488,7 +488,7 
@@ def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) @pytest.mark.integration @@ -564,7 +564,7 @@ def test_apply_feature_view_integration(test_registry): # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) @pytest.mark.integration @@ -640,7 +640,7 @@ def run_test_data_source_apply(test_registry: Registry): # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) def test_commit(): @@ -656,10 +656,15 @@ def test_commit(): # Register Entity without commiting test_registry.apply_entity(entity, project, commit=False) + assert test_registry.cached_registry_proto + assert len(test_registry.cached_registry_proto.project_metadata) == 1 + project_metadata = test_registry.cached_registry_proto.project_metadata[0] + project_uuid = project_metadata.project_uuid + assert len(project_uuid) == 36 + assert_project_uuid(project_uuid, test_registry) # Retrieving the entity should still succeed entities = test_registry.list_entities(project, allow_cache=True) - entity = entities[0] assert ( len(entities) == 1 @@ -668,6 +673,7 @@ def test_commit(): and "team" in entity.tags and entity.tags["team"] == "matchmaking" ) + assert_project_uuid(project_uuid, test_registry) entity = test_registry.get_entity("driver_car_id", project, allow_cache=True) assert ( @@ -676,6 +682,7 @@ def test_commit(): and "team" in entity.tags and entity.tags["team"] == "matchmaking" ) + assert_project_uuid(project_uuid, test_registry) # Create new registry that points to the same store 
registry_with_same_store = Registry(registry_config, None) @@ -683,6 +690,7 @@ def test_commit(): # Retrieving the entity should fail since the store is empty entities = registry_with_same_store.list_entities(project) assert len(entities) == 0 + assert_project_uuid(project_uuid, registry_with_same_store) # commit from the original registry test_registry.commit() @@ -692,7 +700,6 @@ def test_commit(): # Retrieving the entity should now succeed entities = registry_with_same_store.list_entities(project) - entity = entities[0] assert ( len(entities) == 1 @@ -701,6 +708,7 @@ def test_commit(): and "team" in entity.tags and entity.tags["team"] == "matchmaking" ) + assert_project_uuid(project_uuid, registry_with_same_store) entity = test_registry.get_entity("driver_car_id", project) assert ( @@ -714,4 +722,10 @@ def test_commit(): # Will try to reload registry, which will fail because the file has been deleted with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto() + test_registry._get_registry_proto(project=project) + + +def assert_project_uuid(project_uuid, test_registry): + assert len(test_registry.cached_registry_proto.project_metadata) == 1 + project_metadata = test_registry.cached_registry_proto.project_metadata[0] + assert project_metadata.project_uuid == project_uuid diff --git a/sdk/python/tests/integration/registration/test_sql_registry.py b/sdk/python/tests/integration/registration/test_sql_registry.py index c483a7c46f..56aff8c6d1 100644 --- a/sdk/python/tests/integration/registration/test_sql_registry.py +++ b/sdk/python/tests/integration/registration/test_sql_registry.py @@ -117,8 +117,14 @@ def test_apply_entity_success(sql_registry): # Register Entity sql_registry.apply_entity(entity, project) + project_metadata = sql_registry.list_project_metadata(project=project) + assert len(project_metadata) == 1 + project_uuid = project_metadata[0].project_uuid + assert len(project_metadata[0].project_uuid) == 36 + assert_project_uuid(project, 
project_uuid, sql_registry) entities = sql_registry.list_entities(project) + assert_project_uuid(project, project_uuid, sql_registry) entity = entities[0] assert ( @@ -138,12 +144,20 @@ def test_apply_entity_success(sql_registry): ) sql_registry.delete_entity("driver_car_id", project) + assert_project_uuid(project, project_uuid, sql_registry) entities = sql_registry.list_entities(project) + assert_project_uuid(project, project_uuid, sql_registry) assert len(entities) == 0 sql_registry.teardown() +def assert_project_uuid(project, project_uuid, sql_registry): + project_metadata = sql_registry.list_project_metadata(project=project) + assert len(project_metadata) == 1 + assert project_metadata[0].project_uuid == project_uuid + + @pytest.mark.skipif( sys.platform == "darwin" and "GITHUB_REF" in os.environ, reason="does not run on mac github actions", From d3868c5ceec62bab9e12e115c5167c88df0a2745 Mon Sep 17 00:00:00 2001 From: Abhin Chhabra Date: Mon, 18 Jul 2022 21:22:51 -0400 Subject: [PATCH 39/73] chore: Upgrade GCP dependencies (#2945) * Upgrade GCP dependencies. 
Signed-off-by: Abhin Chhabra * `entity_df` in `get_historical_features` should be tzaware Signed-off-by: Abhin Chhabra * Linting fixes Signed-off-by: Abhin Chhabra * Update 3.9 reqs Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Remove unnecessary code coverage Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang Co-authored-by: Kevin Zhang --- sdk/python/feast/feature_store.py | 10 +- .../feast/infra/offline_stores/bigquery.py | 5 +- sdk/python/feast/utils.py | 9 ++ .../requirements/py3.10-ci-requirements.txt | 102 ++++++++++-------- .../requirements/py3.10-requirements.txt | 18 ++-- .../requirements/py3.8-ci-requirements.txt | 76 +++++++------ .../requirements/py3.8-requirements.txt | 10 +- .../requirements/py3.9-ci-requirements.txt | 102 ++++++++++-------- .../requirements/py3.9-requirements.txt | 18 ++-- .../universal/data_sources/bigquery.py | 5 + setup.py | 7 +- 11 files changed, 203 insertions(+), 159 deletions(-) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 1eeb8c22d3..4856a46f8c 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -1057,17 +1057,17 @@ def get_historical_features( # Check that the right request data is present in the entity_df if type(entity_df) == pd.DataFrame: - entity_pd_df = cast(pd.DataFrame, entity_df) + entity_df = utils.make_df_tzaware(cast(pd.DataFrame, entity_df)) for fv in request_feature_views: for feature in fv.features: - if feature.name not in entity_pd_df.columns: + if feature.name not in entity_df.columns: raise RequestDataNotFoundInEntityDfException( feature_name=feature.name, feature_view_name=fv.name ) for odfv in on_demand_feature_views: odfv_request_data_schema = odfv.get_request_data_schema() for feature_name in odfv_request_data_schema.keys(): - if feature_name not in entity_pd_df.columns: + if feature_name not in entity_df.columns: raise RequestDataNotFoundInEntityDfException( feature_name=feature_name, 
feature_view_name=odfv.name, ) @@ -2273,7 +2273,7 @@ def _teardown_go_server(self): @log_exceptions_and_usage def write_logged_features( - self, logs: Union[pa.Table, Path], source: Union[FeatureService] + self, logs: Union[pa.Table, Path], source: FeatureService ): """ Write logs produced by a source (currently only feature service is supported as a source) @@ -2302,7 +2302,7 @@ def write_logged_features( @log_exceptions_and_usage def validate_logged_features( self, - source: Union[FeatureService], + source: FeatureService, start: datetime, end: datetime, reference: ValidationReference, diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 3bf340acf9..982045607d 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -424,7 +424,7 @@ def to_bigquery( job_config: bigquery.QueryJobConfig = None, timeout: int = 1800, retry_cadence: int = 10, - ) -> Optional[str]: + ) -> str: """ Triggers the execution of a historical feature retrieval query and exports the results to a BigQuery table. Runs for a maximum amount of time specified by the timeout parameter (defaulting to 30 minutes). 
@@ -567,7 +567,7 @@ def _wait_until_done(bq_job): finally: if client.get_job(bq_job).state in ["PENDING", "RUNNING"]: - client.cancel_job(bq_job) + client.cancel_job(bq_job.job_id) raise BigQueryJobCancelled(job_id=bq_job.job_id) if bq_job.exception(): @@ -601,6 +601,7 @@ def _upload_entity_df( client: Client, table_name: str, entity_df: Union[pd.DataFrame, str], ) -> Table: """Uploads a Pandas entity dataframe into a BigQuery table and returns the resulting table""" + job: Union[bigquery.job.query.QueryJob, bigquery.job.load.LoadJob] if isinstance(entity_df, str): job = client.query(f"CREATE TABLE {table_name} AS ({entity_df})") diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index 9f18da38cd..af22fbca3f 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -28,6 +28,15 @@ def make_tzaware(t: datetime) -> datetime: return t +def make_df_tzaware(t: pd.DataFrame) -> pd.DataFrame: + """Make all datetime type columns tzaware; leave everything else intact.""" + df = t.copy() # don't modify incoming dataframe inplace + for column in df.columns: + if pd.api.types.is_datetime64_any_dtype(df[column]): + df[column] = pd.to_datetime(df[column], utc=True) + return df + + def to_naive_utc(ts: datetime) -> datetime: if ts.tzinfo is None: return ts diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index e0b05ecaa0..9e47356000 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.10-ci-requirements.txt # -absl-py==1.1.0 +absl-py==1.2.0 # via tensorflow-metadata adal==1.2.7 # via @@ -59,7 +59,7 @@ attrs==21.4.0 # pytest avro==1.10.0 # via feast (setup.py) -azure-core==1.24.1 +azure-core==1.24.2 # via # adlfs # azure-identity @@ -69,7 +69,7 @@ azure-datalake-store==0.0.52 # via adlfs azure-identity==1.10.0 # via adlfs 
-azure-storage-blob==12.12.0 +azure-storage-blob==12.13.0 # via adlfs babel==2.10.3 # via sphinx @@ -95,7 +95,7 @@ build==0.8.0 # pip-tools cachecontrol==0.12.11 # via firebase-admin -cachetools==4.2.4 +cachetools==5.2.0 # via google-auth certifi==2022.6.15 # via @@ -103,7 +103,7 @@ certifi==2022.6.15 # msrest # requests # snowflake-connector-python -cffi==1.15.0 +cffi==1.15.1 # via # azure-datalake-store # cryptography @@ -130,7 +130,7 @@ colorama==0.4.5 # via # feast (setup.py) # great-expectations -coverage[toml]==6.4.1 +coverage[toml]==6.4.2 # via pytest-cov cryptography==35.0.0 # via @@ -147,6 +147,8 @@ dask==2022.1.1 # via feast (setup.py) dataclasses==0.6 # via great-expectations +db-dtypes==1.0.2 + # via google-cloud-bigquery decorator==5.1.1 # via # gcsfs @@ -157,7 +159,7 @@ deprecation==2.1.0 # via testcontainers dill==0.3.5.1 # via feast (setup.py) -distlib==0.3.4 +distlib==0.3.5 # via virtualenv docker==5.0.3 # via @@ -173,17 +175,17 @@ execnet==1.9.0 # via pytest-xdist executing==0.8.3 # via stack-data -fastapi==0.78.0 +fastapi==0.79.0 # via feast (setup.py) fastavro==1.5.2 # via # feast (setup.py) # pandavro -fastjsonschema==2.15.3 +fastjsonschema==2.16.1 # via nbformat filelock==3.7.1 # via virtualenv -firebase-admin==4.5.2 +firebase-admin==5.2.0 # via feast (setup.py) fissix==21.11.13 # via bowler @@ -201,7 +203,7 @@ fsspec==2022.1.0 # s3fs gcsfs==2022.1.0 # via feast (setup.py) -google-api-core[grpc]==1.31.6 +google-api-core[grpc]==2.8.2 # via # feast (setup.py) # firebase-admin @@ -211,9 +213,10 @@ google-api-core[grpc]==1.31.6 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.52.0 + # google-cloud-storage +google-api-python-client==2.53.0 # via firebase-admin -google-auth==1.35.0 +google-auth==2.9.1 # via # gcsfs # google-api-core @@ -226,36 +229,38 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==0.5.2 # via gcsfs -google-cloud-bigquery==2.34.4 - # via feast 
(setup.py) -google-cloud-bigquery-storage==2.13.2 +google-cloud-bigquery[pandas]==3.2.0 # via feast (setup.py) -google-cloud-core==1.7.2 +google-cloud-bigquery-storage==2.14.1 # via # feast (setup.py) # google-cloud-bigquery +google-cloud-core==2.3.2 + # via + # google-cloud-bigquery # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.7.1 +google-cloud-datastore==2.8.0 # via feast (setup.py) -google-cloud-firestore==2.5.3 +google-cloud-firestore==2.6.0 # via firebase-admin -google-cloud-storage==1.40.0 +google-cloud-storage==2.4.0 # via # feast (setup.py) # firebase-admin # gcsfs google-crc32c==1.3.0 # via google-resumable-media -google-resumable-media==1.3.3 +google-resumable-media==2.3.3 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos==1.56.3 +googleapis-common-protos==1.56.4 # via # feast (setup.py) # google-api-core + # grpcio-status # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) @@ -265,10 +270,13 @@ grpcio==1.47.0 # google-api-core # google-cloud-bigquery # grpcio-reflection + # grpcio-status # grpcio-testing # grpcio-tools grpcio-reflection==1.47.0 # via feast (setup.py) +grpcio-status==1.47.0 + # via google-api-core grpcio-testing==1.47.0 # via feast (setup.py) grpcio-tools==1.47.0 @@ -293,7 +301,7 @@ idna==3.3 # requests # snowflake-connector-python # yarl -imagesize==1.3.0 +imagesize==1.4.1 # via sphinx importlib-metadata==4.12.0 # via great-expectations @@ -322,13 +330,13 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.6.1 +jsonschema==4.7.2 # via # altair # feast (setup.py) # great-expectations # nbformat -jupyter-core==4.10.0 +jupyter-core==4.11.1 # via nbformat locket==1.0.0 # via partd @@ -342,7 +350,7 @@ mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==2.0.3 +mistune==2.0.4 # via great-expectations mmh3==3.0.0 # via feast (setup.py) @@ -384,9 +392,10 @@ nbformat==5.4.0 # via 
great-expectations nodeenv==1.7.0 # via pre-commit -numpy==1.22.0 +numpy==1.23.1 # via # altair + # db-dtypes # feast (setup.py) # great-expectations # pandas @@ -401,8 +410,8 @@ packaging==21.3 # via # build # dask + # db-dtypes # deprecation - # google-api-core # google-cloud-bigquery # great-expectations # pytest @@ -411,7 +420,9 @@ packaging==21.3 pandas==1.4.3 # via # altair + # db-dtypes # feast (setup.py) + # google-cloud-bigquery # great-expectations # pandavro # snowflake-connector-python @@ -431,7 +442,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.7.0 +pip-tools==6.8.0 # via feast (setup.py) platformdirs==2.5.2 # via virtualenv @@ -439,9 +450,9 @@ pluggy==1.0.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.4.0 +portalocker==2.5.1 # via msal-extensions -pre-commit==2.19.0 +pre-commit==2.20.0 # via feast (setup.py) prompt-toolkit==3.0.30 # via ipython @@ -462,6 +473,7 @@ protobuf==3.20.1 # google-cloud-firestore # googleapis-common-protos # grpcio-reflection + # grpcio-status # grpcio-testing # grpcio-tools # mypy-protobuf @@ -485,7 +497,9 @@ py4j==0.10.9.5 # via pyspark pyarrow==6.0.1 # via + # db-dtypes # feast (setup.py) + # google-cloud-bigquery # snowflake-connector-python pyasn1==0.4.8 # via @@ -568,7 +582,6 @@ python-dotenv==0.20.0 pytz==2022.1 # via # babel - # google-api-core # great-expectations # moto # pandas @@ -584,7 +597,7 @@ pyyaml==6.0 # uvicorn redis==4.2.2 # via feast (setup.py) -regex==2022.6.2 +regex==2022.7.9 # via black requests==2.28.1 # via @@ -627,11 +640,8 @@ six==1.16.0 # via # azure-core # azure-identity - # google-api-core # google-auth # google-auth-httplib2 - # google-cloud-core - # google-resumable-media # grpcio # happybase # mock @@ -695,7 +705,7 @@ tomli==2.0.1 # mypy # pep517 # pytest -toolz==0.11.2 +toolz==0.12.0 # via # altair # dask @@ -710,7 +720,7 @@ traitlets==5.3.0 # jupyter-core # matplotlib-inline # nbformat -trino==0.313.0 +trino==0.314.0 # via feast (setup.py) 
typed-ast==1.5.4 # via black @@ -724,19 +734,19 @@ types-python-dateutil==2.8.18 # via feast (setup.py) types-pytz==2022.1.1 # via feast (setup.py) -types-pyyaml==6.0.9 +types-pyyaml==6.0.10 # via feast (setup.py) -types-redis==4.3.3 +types-redis==4.3.4 # via feast (setup.py) -types-requests==2.28.0 +types-requests==2.28.2 # via feast (setup.py) -types-setuptools==57.4.18 +types-setuptools==63.2.0 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.15 +types-urllib3==1.26.16 # via types-requests -typing-extensions==4.2.0 +typing-extensions==4.3.0 # via # azure-core # great-expectations @@ -749,7 +759,7 @@ tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.9 +urllib3==1.26.10 # via # botocore # feast (setup.py) @@ -786,7 +796,7 @@ xmltodict==0.13.0 # via moto yarl==1.7.2 # via aiohttp -zipp==3.8.0 +zipp==3.8.1 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index d4a4425aec..0440a171ed 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt # -absl-py==1.1.0 +absl-py==1.2.0 # via tensorflow-metadata anyio==3.6.1 # via @@ -38,7 +38,7 @@ dask==2022.1.1 # via feast (setup.py) dill==0.3.5.1 # via feast (setup.py) -fastapi==0.78.0 +fastapi==0.79.0 # via feast (setup.py) fastavro==1.5.2 # via @@ -50,9 +50,9 @@ fsspec==2022.5.0 # via dask google-api-core==2.8.2 # via feast (setup.py) -google-auth==2.9.0 +google-auth==2.9.1 # via google-api-core -googleapis-common-protos==1.56.3 +googleapis-common-protos==1.56.4 # via # feast (setup.py) # google-api-core @@ -73,7 +73,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.6.1 +jsonschema==4.7.2 # via 
feast (setup.py) locket==1.0.0 # via partd @@ -87,7 +87,7 @@ mypy==0.961 # via sqlalchemy mypy-extensions==0.4.3 # via mypy -numpy==1.22.0 +numpy==1.23.1 # via # feast (setup.py) # pandas @@ -170,7 +170,7 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via mypy -toolz==0.11.2 +toolz==0.12.0 # via # dask # partd @@ -178,12 +178,12 @@ tqdm==4.64.0 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) -typing-extensions==4.2.0 +typing-extensions==4.3.0 # via # mypy # pydantic # sqlalchemy2-stubs -urllib3==1.26.9 +urllib3==1.26.10 # via requests uvicorn[standard]==0.18.2 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 813420a121..67db3a978f 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -59,7 +59,7 @@ attrs==21.4.0 # pytest avro==1.10.0 # via feast (setup.py) -azure-core==1.24.1 +azure-core==1.24.2 # via # adlfs # azure-identity @@ -69,7 +69,7 @@ azure-datalake-store==0.0.52 # via adlfs azure-identity==1.10.0 # via adlfs -azure-storage-blob==12.12.0 +azure-storage-blob==12.13.0 # via adlfs babel==2.10.3 # via sphinx @@ -99,7 +99,7 @@ build==0.8.0 # pip-tools cachecontrol==0.12.11 # via firebase-admin -cachetools==4.2.4 +cachetools==5.2.0 # via google-auth certifi==2022.6.15 # via @@ -107,7 +107,7 @@ certifi==2022.6.15 # msrest # requests # snowflake-connector-python -cffi==1.15.0 +cffi==1.15.1 # via # azure-datalake-store # cryptography @@ -151,6 +151,8 @@ dask==2022.1.1 # via feast (setup.py) dataclasses==0.6 # via great-expectations +db-dtypes==1.0.2 + # via google-cloud-bigquery decorator==5.1.1 # via # gcsfs @@ -187,7 +189,7 @@ fastjsonschema==2.15.3 # via nbformat filelock==3.7.1 # via virtualenv -firebase-admin==4.5.2 +firebase-admin==5.2.0 # via feast (setup.py) fissix==21.11.13 # via bowler @@ -205,7 +207,7 @@ fsspec==2022.1.0 # s3fs gcsfs==2022.1.0 # via feast (setup.py) 
-google-api-core[grpc]==1.31.6 +google-api-core[grpc]==2.8.2 # via # feast (setup.py) # firebase-admin @@ -215,9 +217,10 @@ google-api-core[grpc]==1.31.6 # google-cloud-core # google-cloud-datastore # google-cloud-firestore + # google-cloud-storage google-api-python-client==2.52.0 # via firebase-admin -google-auth==1.35.0 +google-auth==2.9.0 # via # gcsfs # google-api-core @@ -230,29 +233,30 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==0.5.2 # via gcsfs -google-cloud-bigquery==2.34.4 - # via feast (setup.py) -google-cloud-bigquery-storage==2.13.2 +google-cloud-bigquery[pandas]==3.2.0 # via feast (setup.py) -google-cloud-core==1.7.2 +google-cloud-bigquery-storage==2.14.0 # via # feast (setup.py) # google-cloud-bigquery +google-cloud-core==2.3.1 + # via + # google-cloud-bigquery # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.7.1 +google-cloud-datastore==2.7.2 # via feast (setup.py) google-cloud-firestore==2.5.3 # via firebase-admin -google-cloud-storage==1.40.0 +google-cloud-storage==2.4.0 # via # feast (setup.py) # firebase-admin # gcsfs google-crc32c==1.3.0 # via google-resumable-media -google-resumable-media==1.3.3 +google-resumable-media==2.3.3 # via # google-cloud-bigquery # google-cloud-storage @@ -260,6 +264,7 @@ googleapis-common-protos==1.56.3 # via # feast (setup.py) # google-api-core + # grpcio-status # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) @@ -269,10 +274,13 @@ grpcio==1.47.0 # google-api-core # google-cloud-bigquery # grpcio-reflection + # grpcio-status # grpcio-testing # grpcio-tools grpcio-reflection==1.47.0 # via feast (setup.py) +grpcio-status==1.47.0 + # via google-api-core grpcio-testing==1.47.0 # via feast (setup.py) grpcio-tools==1.47.0 @@ -297,7 +305,7 @@ idna==3.3 # requests # snowflake-connector-python # yarl -imagesize==1.3.0 +imagesize==1.4.1 # via sphinx importlib-metadata==4.12.0 # via great-expectations @@ -328,13 
+336,13 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.6.1 +jsonschema==4.7.1 # via # altair # feast (setup.py) # great-expectations # nbformat -jupyter-core==4.10.0 +jupyter-core==4.11.1 # via nbformat locket==1.0.0 # via partd @@ -390,9 +398,10 @@ nbformat==5.4.0 # via great-expectations nodeenv==1.7.0 # via pre-commit -numpy==1.23.0 +numpy==1.23.1 # via # altair + # db-dtypes # feast (setup.py) # great-expectations # pandas @@ -407,8 +416,8 @@ packaging==21.3 # via # build # dask + # db-dtypes # deprecation - # google-api-core # google-cloud-bigquery # great-expectations # pytest @@ -417,7 +426,9 @@ packaging==21.3 pandas==1.4.3 # via # altair + # db-dtypes # feast (setup.py) + # google-cloud-bigquery # great-expectations # pandavro # snowflake-connector-python @@ -437,7 +448,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.7.0 +pip-tools==6.8.0 # via feast (setup.py) platformdirs==2.5.2 # via virtualenv @@ -445,9 +456,9 @@ pluggy==1.0.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.4.0 +portalocker==2.5.1 # via msal-extensions -pre-commit==2.19.0 +pre-commit==2.20.0 # via feast (setup.py) prompt-toolkit==3.0.30 # via ipython @@ -468,6 +479,7 @@ protobuf==3.20.1 # google-cloud-firestore # googleapis-common-protos # grpcio-reflection + # grpcio-status # grpcio-testing # grpcio-tools # mypy-protobuf @@ -491,7 +503,9 @@ py4j==0.10.9.5 # via pyspark pyarrow==6.0.1 # via + # db-dtypes # feast (setup.py) + # google-cloud-bigquery # snowflake-connector-python pyasn1==0.4.8 # via @@ -574,7 +588,6 @@ python-dotenv==0.20.0 pytz==2022.1 # via # babel - # google-api-core # great-expectations # moto # pandas @@ -590,7 +603,7 @@ pyyaml==6.0 # uvicorn redis==4.2.2 # via feast (setup.py) -regex==2022.6.2 +regex==2022.7.9 # via black requests==2.28.1 # via @@ -635,11 +648,8 @@ six==1.16.0 # via # azure-core # azure-identity - # google-api-core # google-auth # google-auth-httplib2 - # 
google-cloud-core - # google-resumable-media # grpcio # happybase # mock @@ -703,7 +713,7 @@ tomli==2.0.1 # mypy # pep517 # pytest -toolz==0.11.2 +toolz==0.12.0 # via # altair # dask @@ -718,7 +728,7 @@ traitlets==5.3.0 # jupyter-core # matplotlib-inline # nbformat -trino==0.313.0 +trino==0.314.0 # via feast (setup.py) typed-ast==1.5.4 # via black @@ -738,13 +748,13 @@ types-redis==4.3.3 # via feast (setup.py) types-requests==2.28.0 # via feast (setup.py) -types-setuptools==57.4.18 +types-setuptools==62.6.1 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.15 +types-urllib3==1.26.16 # via types-requests -typing-extensions==4.2.0 +typing-extensions==4.3.0 # via # aioitertools # azure-core @@ -759,7 +769,7 @@ tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.9 +urllib3==1.26.10 # via # botocore # feast (setup.py) diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 56e5e3adba..a09289f662 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -75,7 +75,7 @@ importlib-resources==5.8.0 # via jsonschema jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.6.1 +jsonschema==4.7.1 # via feast (setup.py) locket==1.0.0 # via partd @@ -89,7 +89,7 @@ mypy==0.961 # via sqlalchemy mypy-extensions==0.4.3 # via mypy -numpy==1.23.0 +numpy==1.23.1 # via # feast (setup.py) # pandas @@ -172,7 +172,7 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via mypy -toolz==0.11.2 +toolz==0.12.0 # via # dask # partd @@ -180,13 +180,13 @@ tqdm==4.64.0 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) -typing-extensions==4.2.0 +typing-extensions==4.3.0 # via # mypy # pydantic # sqlalchemy2-stubs # starlette -urllib3==1.26.9 +urllib3==1.26.10 # via requests uvicorn[standard]==0.18.2 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt 
b/sdk/python/requirements/py3.9-ci-requirements.txt index fe1b8b2e13..8be0c36052 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.9-ci-requirements.txt # -absl-py==1.1.0 +absl-py==1.2.0 # via tensorflow-metadata adal==1.2.7 # via @@ -59,7 +59,7 @@ attrs==21.4.0 # pytest avro==1.10.0 # via feast (setup.py) -azure-core==1.24.1 +azure-core==1.24.2 # via # adlfs # azure-identity @@ -69,7 +69,7 @@ azure-datalake-store==0.0.52 # via adlfs azure-identity==1.10.0 # via adlfs -azure-storage-blob==12.12.0 +azure-storage-blob==12.13.0 # via adlfs babel==2.10.3 # via sphinx @@ -95,7 +95,7 @@ build==0.8.0 # pip-tools cachecontrol==0.12.11 # via firebase-admin -cachetools==4.2.4 +cachetools==5.2.0 # via google-auth certifi==2022.6.15 # via @@ -103,7 +103,7 @@ certifi==2022.6.15 # msrest # requests # snowflake-connector-python -cffi==1.15.0 +cffi==1.15.1 # via # azure-datalake-store # cryptography @@ -130,7 +130,7 @@ colorama==0.4.5 # via # feast (setup.py) # great-expectations -coverage[toml]==6.4.1 +coverage[toml]==6.4.2 # via pytest-cov cryptography==35.0.0 # via @@ -147,6 +147,8 @@ dask==2022.1.1 # via feast (setup.py) dataclasses==0.6 # via great-expectations +db-dtypes==1.0.2 + # via google-cloud-bigquery decorator==5.1.1 # via # gcsfs @@ -157,7 +159,7 @@ deprecation==2.1.0 # via testcontainers dill==0.3.5.1 # via feast (setup.py) -distlib==0.3.4 +distlib==0.3.5 # via virtualenv docker==5.0.3 # via @@ -173,17 +175,17 @@ execnet==1.9.0 # via pytest-xdist executing==0.8.3 # via stack-data -fastapi==0.78.0 +fastapi==0.79.0 # via feast (setup.py) fastavro==1.5.2 # via # feast (setup.py) # pandavro -fastjsonschema==2.15.3 +fastjsonschema==2.16.1 # via nbformat filelock==3.7.1 # via virtualenv -firebase-admin==4.5.2 +firebase-admin==5.2.0 # via feast (setup.py) fissix==21.11.13 # via bowler @@ -201,7 +203,7 @@ fsspec==2022.1.0 
# s3fs gcsfs==2022.1.0 # via feast (setup.py) -google-api-core[grpc]==1.31.6 +google-api-core[grpc]==2.8.2 # via # feast (setup.py) # firebase-admin @@ -211,9 +213,10 @@ google-api-core[grpc]==1.31.6 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.52.0 + # google-cloud-storage +google-api-python-client==2.53.0 # via firebase-admin -google-auth==1.35.0 +google-auth==2.9.1 # via # gcsfs # google-api-core @@ -226,36 +229,38 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==0.5.2 # via gcsfs -google-cloud-bigquery==2.34.4 - # via feast (setup.py) -google-cloud-bigquery-storage==2.13.2 +google-cloud-bigquery[pandas]==3.2.0 # via feast (setup.py) -google-cloud-core==1.7.2 +google-cloud-bigquery-storage==2.14.1 # via # feast (setup.py) # google-cloud-bigquery +google-cloud-core==2.3.2 + # via + # google-cloud-bigquery # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.7.1 +google-cloud-datastore==2.8.0 # via feast (setup.py) -google-cloud-firestore==2.5.3 +google-cloud-firestore==2.6.0 # via firebase-admin -google-cloud-storage==1.40.0 +google-cloud-storage==2.4.0 # via # feast (setup.py) # firebase-admin # gcsfs google-crc32c==1.3.0 # via google-resumable-media -google-resumable-media==1.3.3 +google-resumable-media==2.3.3 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos==1.56.3 +googleapis-common-protos==1.56.4 # via # feast (setup.py) # google-api-core + # grpcio-status # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) @@ -265,10 +270,13 @@ grpcio==1.47.0 # google-api-core # google-cloud-bigquery # grpcio-reflection + # grpcio-status # grpcio-testing # grpcio-tools grpcio-reflection==1.47.0 # via feast (setup.py) +grpcio-status==1.47.0 + # via google-api-core grpcio-testing==1.47.0 # via feast (setup.py) grpcio-tools==1.47.0 @@ -293,7 +301,7 @@ idna==3.3 # requests # 
snowflake-connector-python # yarl -imagesize==1.3.0 +imagesize==1.4.1 # via sphinx importlib-metadata==4.12.0 # via great-expectations @@ -322,13 +330,13 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.6.1 +jsonschema==4.7.2 # via # altair # feast (setup.py) # great-expectations # nbformat -jupyter-core==4.10.0 +jupyter-core==4.11.1 # via nbformat locket==1.0.0 # via partd @@ -342,7 +350,7 @@ mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==2.0.3 +mistune==2.0.4 # via great-expectations mmh3==3.0.0 # via feast (setup.py) @@ -384,9 +392,10 @@ nbformat==5.4.0 # via great-expectations nodeenv==1.7.0 # via pre-commit -numpy==1.22.0 +numpy==1.23.1 # via # altair + # db-dtypes # feast (setup.py) # great-expectations # pandas @@ -401,8 +410,8 @@ packaging==21.3 # via # build # dask + # db-dtypes # deprecation - # google-api-core # google-cloud-bigquery # great-expectations # pytest @@ -411,7 +420,9 @@ packaging==21.3 pandas==1.4.3 # via # altair + # db-dtypes # feast (setup.py) + # google-cloud-bigquery # great-expectations # pandavro # snowflake-connector-python @@ -431,7 +442,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.7.0 +pip-tools==6.8.0 # via feast (setup.py) platformdirs==2.5.2 # via virtualenv @@ -439,9 +450,9 @@ pluggy==1.0.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.4.0 +portalocker==2.5.1 # via msal-extensions -pre-commit==2.19.0 +pre-commit==2.20.0 # via feast (setup.py) prompt-toolkit==3.0.30 # via ipython @@ -462,6 +473,7 @@ protobuf==3.20.1 # google-cloud-firestore # googleapis-common-protos # grpcio-reflection + # grpcio-status # grpcio-testing # grpcio-tools # mypy-protobuf @@ -485,7 +497,9 @@ py4j==0.10.9.5 # via pyspark pyarrow==6.0.1 # via + # db-dtypes # feast (setup.py) + # google-cloud-bigquery # snowflake-connector-python pyasn1==0.4.8 # via @@ -568,7 +582,6 @@ python-dotenv==0.20.0 pytz==2022.1 # via # babel - # google-api-core # 
great-expectations # moto # pandas @@ -584,7 +597,7 @@ pyyaml==6.0 # uvicorn redis==4.2.2 # via feast (setup.py) -regex==2022.6.2 +regex==2022.7.9 # via black requests==2.28.1 # via @@ -629,11 +642,8 @@ six==1.16.0 # via # azure-core # azure-identity - # google-api-core # google-auth # google-auth-httplib2 - # google-cloud-core - # google-resumable-media # grpcio # happybase # mock @@ -697,7 +707,7 @@ tomli==2.0.1 # mypy # pep517 # pytest -toolz==0.11.2 +toolz==0.12.0 # via # altair # dask @@ -712,7 +722,7 @@ traitlets==5.3.0 # jupyter-core # matplotlib-inline # nbformat -trino==0.313.0 +trino==0.314.0 # via feast (setup.py) typed-ast==1.5.4 # via black @@ -726,19 +736,19 @@ types-python-dateutil==2.8.18 # via feast (setup.py) types-pytz==2022.1.1 # via feast (setup.py) -types-pyyaml==6.0.9 +types-pyyaml==6.0.10 # via feast (setup.py) -types-redis==4.3.3 +types-redis==4.3.4 # via feast (setup.py) -types-requests==2.28.0 +types-requests==2.28.2 # via feast (setup.py) -types-setuptools==57.4.18 +types-setuptools==63.2.0 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.15 +types-urllib3==1.26.16 # via types-requests -typing-extensions==4.2.0 +typing-extensions==4.3.0 # via # aioitertools # azure-core @@ -753,7 +763,7 @@ tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.9 +urllib3==1.26.10 # via # botocore # feast (setup.py) @@ -790,7 +800,7 @@ xmltodict==0.13.0 # via moto yarl==1.7.2 # via aiohttp -zipp==3.8.0 +zipp==3.8.1 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 76e2815ed5..91e6602083 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=sdk/python/requirements/py3.9-requirements.txt # 
-absl-py==1.1.0 +absl-py==1.2.0 # via tensorflow-metadata anyio==3.6.1 # via @@ -38,7 +38,7 @@ dask==2022.1.1 # via feast (setup.py) dill==0.3.5.1 # via feast (setup.py) -fastapi==0.78.0 +fastapi==0.79.0 # via feast (setup.py) fastavro==1.5.2 # via @@ -50,9 +50,9 @@ fsspec==2022.5.0 # via dask google-api-core==2.8.2 # via feast (setup.py) -google-auth==2.9.0 +google-auth==2.9.1 # via google-api-core -googleapis-common-protos==1.56.3 +googleapis-common-protos==1.56.4 # via # feast (setup.py) # google-api-core @@ -73,7 +73,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.6.1 +jsonschema==4.7.2 # via feast (setup.py) locket==1.0.0 # via partd @@ -87,7 +87,7 @@ mypy==0.961 # via sqlalchemy mypy-extensions==0.4.3 # via mypy -numpy==1.22.0 +numpy==1.23.1 # via # feast (setup.py) # pandas @@ -170,7 +170,7 @@ toml==0.10.2 # via feast (setup.py) tomli==2.0.1 # via mypy -toolz==0.11.2 +toolz==0.12.0 # via # dask # partd @@ -178,13 +178,13 @@ tqdm==4.64.0 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) -typing-extensions==4.2.0 +typing-extensions==4.3.0 # via # mypy # pydantic # sqlalchemy2-stubs # starlette -urllib3==1.26.9 +urllib3==1.26.10 # via requests uvicorn[standard]==0.18.2 # via feast (setup.py) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 83bc1ef308..0f41176bd1 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -13,6 +13,7 @@ BigQueryLoggingDestination, SavedDatasetBigQueryStorage, ) +from feast.utils import make_df_tzaware from tests.integration.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) @@ -74,6 +75,10 @@ def create_data_source( f"{self.gcp_project}.{self.project_name}.{destination_name}" ) + # Make all datetime columns timezone 
aware. This should be the behaviour of + # `BigQueryOfflineStore.offline_write_batch`, but since we're bypassing that API here, we should follow the same + # rule. The schema of this initial dataframe determines the schema in the newly created BigQuery table. + df = make_df_tzaware(df) job = self.client.load_table_from_dataframe(df, destination_name) job.result() diff --git a/setup.py b/setup.py index deaf47f955..bbcd6ad3ec 100644 --- a/setup.py +++ b/setup.py @@ -80,11 +80,10 @@ ] GCP_REQUIRED = [ - "google-cloud-bigquery>=2,<3", + "google-cloud-bigquery[pandas]>=2,<4", "google-cloud-bigquery-storage >= 2.0.0,<3", "google-cloud-datastore>=2.1.*,<3", - "google-cloud-storage>=1.34.*,<1.41", - "google-cloud-core>=1.4.0,<2.0.0", + "google-cloud-storage>=1.34.*,<3", ] REDIS_REQUIRED = [ @@ -154,7 +153,7 @@ "sphinx-rtd-theme", "testcontainers>=3.5,<4", "adlfs==0.5.9", - "firebase-admin==4.5.2", + "firebase-admin>=5.2.0,<6", "pre-commit", "assertpy==1.1", "pip-tools", From b69eadc727f1ddc44fba1cec8834696c3b287d13 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Mon, 18 Jul 2022 19:22:24 -0700 Subject: [PATCH 40/73] ci: Remove code coverage for now to keep from blocking other prs (#2950) * Remove code coverage for now to keep from blocking other prs Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .github/workflows/pr_local_integration_tests.yml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 4c87780888..d4db8a3a7c 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -78,12 +78,3 @@ jobs: - name: Test local integration tests if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak run: make test-python-integration-local - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - with: - token: 
${{ secrets.CODECOV_TOKEN }} - files: ./coverage.xml - flags: localintegrationtests - env_vars: OS,PYTHON - fail_ci_if_error: true - verbose: true From 3e3489c0295d1adf3119159b6a46314873abdbdf Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Tue, 19 Jul 2022 11:31:27 -0400 Subject: [PATCH 41/73] chore: Remove UI from quickstart colab (#2951) chore: Remove UI from quickstart colab since Colab only supports Python 3.7, which means it isn't showing the latest releases Signed-off-by: Danny Chiao --- examples/quickstart/quickstart.ipynb | 39 ---------------------------- 1 file changed, 39 deletions(-) diff --git a/examples/quickstart/quickstart.ipynb b/examples/quickstart/quickstart.ipynb index d29ee4fa35..c7d0fcfe54 100644 --- a/examples/quickstart/quickstart.ipynb +++ b/examples/quickstart/quickstart.ipynb @@ -949,45 +949,6 @@ "pprint(feature_vector)" ] }, - { - "cell_type": "markdown", - "metadata": { - "id": "_dBcqkaCnOYv" - }, - "source": [ - "## Step 7: Explore registered features with the Web UI" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 52 - }, - "id": "mCUPypyhl5TH", - "outputId": "fb2475c3-b254-42e6-b638-7982d52d2a19" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "nohup: appending output to 'nohup.out'\n", - "Open the Web UI at https://c6cuffvc4qm-496ff2e9c6d22116-8888-colab.googleusercontent.com/\n" - ] - } - ], - "source": [ - "from google.colab.output import eval_js\n", - "host = eval_js(\"google.colab.kernel.proxyPort(8888)\")\n", - "\n", - "!nohup feast ui &\n", - "\n", - "print(f\"Open the Web UI at {host}\")" - ] - }, { "cell_type": "markdown", "metadata": { From 92785b816e7e0057e84cf4da768bf3af135adc9f Mon Sep 17 00:00:00 2001 From: Abhin Chhabra Date: Tue, 19 Jul 2022 18:21:27 -0400 Subject: [PATCH 42/73] chore: Widen dependencies (#2928) * Widen dependencies. 
This should make it easier to install Feast alongside other Python packages. Signed-off-by: Abhin Chhabra Signed-off-by: Kevin Zhang * Exclude embedded_go/lib from black Signed-off-by: Abhin Chhabra Signed-off-by: Kevin Zhang * Generate lockfiles for Python3.9 and Python 3.10 Signed-off-by: Abhin Chhabra Signed-off-by: Kevin Zhang * Switch to `google-cloud-bigquery[pandas]`. This was covered in https://github.com/feast-dev/feast/issues/2537. I've only generated lockfiles for Python 3.8 to test this in CI. I'll do the same for the rest of the Python versions later. Signed-off-by: Abhin Chhabra Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Update 3.9 reqs Signed-off-by: Kevin Zhang * UPdate 3.10 Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Fix dependencies Signed-off-by: Kevin Zhang Co-authored-by: Kevin Zhang --- pyproject.toml | 1 + sdk/python/feast/cli.py | 30 +++++++-- sdk/python/feast/data_source.py | 5 +- sdk/python/feast/diff/infra_diff.py | 10 ++- sdk/python/feast/diff/registry_diff.py | 16 +++-- sdk/python/feast/driver_test_data.py | 7 +- .../embedded_go/online_features_service.py | 7 +- sdk/python/feast/feature.py | 9 ++- sdk/python/feast/feature_logging.py | 4 +- sdk/python/feast/feature_store.py | 66 +++++++++++++------ sdk/python/feast/feature_view.py | 5 +- sdk/python/feast/field.py | 6 +- sdk/python/feast/inference.py | 13 +++- sdk/python/feast/infra/aws.py | 5 +- .../batch_materialization_engine.py | 2 +- .../feast/infra/materialization/lambda/app.py | 11 ++-- .../feast/infra/offline_stores/bigquery.py | 31 ++++++--- .../infra/offline_stores/bigquery_source.py | 10 ++- .../postgres_offline_store/postgres.py | 9 ++- .../tests/data_source.py | 9 ++- .../contrib/spark_offline_store/spark.py | 7 +- .../spark_offline_store/spark_source.py | 10 ++- .../test_config/manual_tests.py | 3 +- .../trino_offline_store/tests/data_source.py | 5 +- 
.../contrib/trino_offline_store/trino.py | 21 ++++-- .../trino_offline_store/trino_source.py | 6 +- sdk/python/feast/infra/offline_stores/file.py | 36 +++++++--- .../infra/offline_stores/offline_store.py | 6 +- .../feast/infra/offline_stores/redshift.py | 9 ++- .../feast/infra/offline_stores/snowflake.py | 8 ++- .../infra/offline_stores/snowflake_source.py | 4 +- .../infra/online_stores/contrib/postgres.py | 9 ++- .../feast/infra/online_stores/datastore.py | 15 ++++- .../feast/infra/online_stores/dynamodb.py | 6 +- .../feast/infra/online_stores/sqlite.py | 5 +- .../feast/infra/passthrough_provider.py | 10 ++- sdk/python/feast/infra/provider.py | 14 +++- sdk/python/feast/infra/registry_stores/sql.py | 25 +++++-- sdk/python/feast/infra/utils/aws_utils.py | 43 ++++++++++-- .../infra/utils/postgres/connection_utils.py | 5 +- .../feast/infra/utils/snowflake_utils.py | 5 +- sdk/python/feast/on_demand_feature_view.py | 17 +++-- sdk/python/feast/registry.py | 46 +++++++------ sdk/python/feast/repo_config.py | 16 +++-- sdk/python/feast/stream_feature_view.py | 3 +- sdk/python/feast/templates/aws/test.py | 3 +- sdk/python/feast/templates/gcp/test.py | 3 +- .../feast/templates/postgres/driver_repo.py | 5 +- sdk/python/feast/templates/postgres/test.py | 3 +- sdk/python/feast/templates/snowflake/test.py | 3 +- sdk/python/feast/templates/spark/example.py | 10 ++- sdk/python/feast/types.py | 4 +- sdk/python/feast/ui_server.py | 4 +- sdk/python/feast/utils.py | 6 +- .../requirements/py3.10-ci-requirements.txt | 23 +++---- .../requirements/py3.10-requirements.txt | 4 +- .../requirements/py3.8-ci-requirements.txt | 62 +++++++++-------- .../requirements/py3.8-requirements.txt | 16 ++--- .../requirements/py3.9-ci-requirements.txt | 24 ++++--- .../requirements/py3.9-requirements.txt | 4 +- ...st_benchmark_universal_online_retrieval.py | 4 +- sdk/python/tests/conftest.py | 12 +++- sdk/python/tests/doctest/test_all.py | 8 ++- .../example_repos/example_feature_repo_1.py | 6 +- 
.../example_repos/example_feature_repo_2.py | 5 +- .../example_feature_repo_version_0_19.py | 3 +- ..._repo_with_duplicated_featureview_names.py | 6 +- ...ample_feature_repo_with_entity_join_key.py | 6 +- .../on_demand_feature_view_repo.py | 5 +- sdk/python/tests/foo_provider.py | 5 +- .../integration/e2e/test_go_feature_server.py | 4 +- .../e2e/test_python_feature_server.py | 5 +- .../tests/integration/e2e/test_validation.py | 16 +++-- .../feature_repos/repo_configuration.py | 16 +++-- .../universal/data_sources/file.py | 4 +- .../feature_repos/universal/feature_views.py | 6 +- .../materialization/test_lambda.py | 10 ++- .../offline_store/test_feature_logging.py | 6 +- .../offline_store/test_s3_custom_endpoint.py | 4 +- .../test_universal_historical_retrieval.py | 9 ++- .../online_store/test_e2e_local.py | 3 +- .../online_store/test_universal_online.py | 5 +- .../integration/registration/test_cli.py | 3 +- .../registration/test_feature_store.py | 23 +++++-- .../registration/test_inference.py | 19 ++++-- .../integration/registration/test_registry.py | 52 ++++++++++----- .../registration/test_sql_registry.py | 34 +++++++--- .../test_stream_feature_view_apply.py | 11 +++- .../test_universal_odfv_feature_inference.py | 6 +- .../registration/test_universal_types.py | 39 +++++++---- .../tests/unit/diff/test_registry_diff.py | 15 ++++- sdk/python/tests/unit/test_data_sources.py | 18 +++-- sdk/python/tests/unit/test_entity.py | 4 +- sdk/python/tests/unit/test_feature_views.py | 8 ++- .../tests/unit/test_on_demand_feature_view.py | 3 +- sdk/python/tests/unit/test_usage.py | 2 +- sdk/python/tests/utils/data_source_utils.py | 9 ++- setup.py | 56 +++++++++------- 98 files changed, 845 insertions(+), 369 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8ba7254440..1fef4c27c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ exclude = ''' | pb2.py | \.pyi | protos + | sdk/python/feast/embedded_go/lib )/ ) ''' diff --git a/sdk/python/feast/cli.py 
b/sdk/python/feast/cli.py index 91815d30fd..99e084b666 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -524,7 +524,10 @@ def registry_dump_command(ctx: click.Context): @click.argument("start_ts") @click.argument("end_ts") @click.option( - "--views", "-v", help="Feature views to materialize", multiple=True, + "--views", + "-v", + help="Feature views to materialize", + multiple=True, ) @click.pass_context def materialize_command( @@ -551,7 +554,10 @@ def materialize_command( @cli.command("materialize-incremental") @click.argument("end_ts") @click.option( - "--views", "-v", help="Feature views to incrementally materialize", multiple=True, + "--views", + "-v", + help="Feature views to incrementally materialize", + multiple=True, ) @click.pass_context def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List[str]): @@ -622,10 +628,14 @@ def init_command(project_directory, minimal: bool, template: str): help="Specify a server type: 'http' or 'grpc' [default: http]", ) @click.option( - "--no-access-log", is_flag=True, help="Disable the Uvicorn access log.", + "--no-access-log", + is_flag=True, + help="Disable the Uvicorn access log.", ) @click.option( - "--no-feature-log", is_flag=True, help="Disable logging served features", + "--no-feature-log", + is_flag=True, + help="Disable logging served features", ) @click.pass_context def serve_command( @@ -771,13 +781,19 @@ def disable_alpha_features(ctx: click.Context): @cli.command("validate") @click.option( - "--feature-service", "-f", help="Specify a feature service name", + "--feature-service", + "-f", + help="Specify a feature service name", ) @click.option( - "--reference", "-r", help="Specify a validation reference name", + "--reference", + "-r", + help="Specify a validation reference name", ) @click.option( - "--no-profile-cache", is_flag=True, help="Do not store cached profile in registry", + "--no-profile-cache", + is_flag=True, + help="Do not store cached profile in 
registry", ) @click.argument("start_ts") @click.argument("end_ts") diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index f5c40d2421..6ab7934371 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -116,7 +116,10 @@ class KinesisOptions: """ def __init__( - self, record_format: StreamFormat, region: str, stream_name: str, + self, + record_format: StreamFormat, + region: str, + stream_name: str, ): self.record_format = record_format self.region = region diff --git a/sdk/python/feast/diff/infra_diff.py b/sdk/python/feast/diff/infra_diff.py index a09eaf39eb..51bece33dd 100644 --- a/sdk/python/feast/diff/infra_diff.py +++ b/sdk/python/feast/diff/infra_diff.py @@ -126,7 +126,8 @@ def diff_infra_protos( infra_objects_to_delete, infra_objects_to_add, ) = tag_infra_proto_objects_for_keep_delete_add( - current_infra_objects, new_infra_objects, + current_infra_objects, + new_infra_objects, ) for e in infra_objects_to_add: @@ -199,5 +200,10 @@ def diff_between( ) ) return InfraObjectDiff( - new.name, infra_object_type, current, new, property_diffs, transition, + new.name, + infra_object_type, + current, + new, + property_diffs, + transition, ) diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 7a5b9b7564..fc0acf0223 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -161,7 +161,9 @@ def diff_registry_objects( def extract_objects_for_keep_delete_update_add( - registry: BaseRegistry, current_project: str, desired_repo_contents: RepoContents, + registry: BaseRegistry, + current_project: str, + desired_repo_contents: RepoContents, ) -> Tuple[ Dict[FeastObjectType, Set[FeastObject]], Dict[FeastObjectType, Set[FeastObject]], @@ -208,7 +210,9 @@ def extract_objects_for_keep_delete_update_add( def diff_between( - registry: BaseRegistry, current_project: str, desired_repo_contents: RepoContents, + registry: BaseRegistry, + 
current_project: str, + desired_repo_contents: RepoContents, ) -> RegistryDiff: """ Returns the difference between the current and desired repo states. @@ -305,12 +309,16 @@ def apply_diff_to_registry( BaseFeatureView, feast_object_diff.current_feast_object ) registry.delete_feature_view( - feature_view_obj.name, project, commit=False, + feature_view_obj.name, + project, + commit=False, ) elif feast_object_diff.feast_object_type == FeastObjectType.DATA_SOURCE: ds_obj = cast(DataSource, feast_object_diff.current_feast_object) registry.delete_data_source( - ds_obj.name, project, commit=False, + ds_obj.name, + project, + commit=False, ) if feast_object_diff.transition_type in [ diff --git a/sdk/python/feast/driver_test_data.py b/sdk/python/feast/driver_test_data.py index 991b5391e8..da9d061313 100644 --- a/sdk/python/feast/driver_test_data.py +++ b/sdk/python/feast/driver_test_data.py @@ -30,7 +30,12 @@ def _convert_event_timestamp(event_timestamp: pd.Timestamp, t: EventTimestampTyp def create_orders_df( - customers, drivers, start_date, end_date, order_count, locations=None, + customers, + drivers, + start_date, + end_date, + order_count, + locations=None, ) -> pd.DataFrame: """ Example df generated by this function (if locations): diff --git a/sdk/python/feast/embedded_go/online_features_service.py b/sdk/python/feast/embedded_go/online_features_service.py index d9b34b2414..bf82fab6a3 100644 --- a/sdk/python/feast/embedded_go/online_features_service.py +++ b/sdk/python/feast/embedded_go/online_features_service.py @@ -50,7 +50,8 @@ def __init__( ) self._service = NewOnlineFeatureService( - self._config, self._transformation_callback, + self._config, + self._transformation_callback, ) # This should raise an exception if there were any errors in NewOnlineFeatureService. 
@@ -263,7 +264,9 @@ def transformation_callback( def logging_callback( - fs: "FeatureStore", feature_service_name: str, dataset_dir: str, + fs: "FeatureStore", + feature_service_name: str, + dataset_dir: str, ) -> bytes: feature_service = fs.get_feature_service(feature_service_name, allow_cache=True) try: diff --git a/sdk/python/feast/feature.py b/sdk/python/feast/feature.py index d1f96c302a..6b5acd9fc6 100644 --- a/sdk/python/feast/feature.py +++ b/sdk/python/feast/feature.py @@ -30,7 +30,10 @@ class Feature: """ def __init__( - self, name: str, dtype: ValueType, labels: Optional[Dict[str, str]] = None, + self, + name: str, + dtype: ValueType, + labels: Optional[Dict[str, str]] = None, ): """Creates a Feature object.""" self._name = name @@ -91,7 +94,9 @@ def to_proto(self) -> FeatureSpecProto: value_type = ValueTypeProto.Enum.Value(self.dtype.name) return FeatureSpecProto( - name=self.name, value_type=value_type, tags=self.labels, + name=self.name, + value_type=value_type, + tags=self.labels, ) @classmethod diff --git a/sdk/python/feast/feature_logging.py b/sdk/python/feast/feature_logging.py index 275bde72ec..da9a0c9fe5 100644 --- a/sdk/python/feast/feature_logging.py +++ b/sdk/python/feast/feature_logging.py @@ -34,12 +34,12 @@ class LoggingSource: @abc.abstractmethod def get_schema(self, registry: "BaseRegistry") -> pa.Schema: - """ Generate schema for logs destination. """ + """Generate schema for logs destination.""" raise NotImplementedError @abc.abstractmethod def get_log_timestamp_column(self) -> str: - """ Return timestamp column that must exist in generated schema. 
""" + """Return timestamp column that must exist in generated schema.""" raise NotImplementedError diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 4856a46f8c..ce2c98e1ea 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -118,7 +118,9 @@ class FeatureStore: @log_exceptions def __init__( - self, repo_path: Optional[str] = None, config: Optional[RepoConfig] = None, + self, + repo_path: Optional[str] = None, + config: Optional[RepoConfig] = None, ): """ Creates a FeatureStore object. @@ -253,7 +255,9 @@ def list_request_feature_views( ) def _list_feature_views( - self, allow_cache: bool = False, hide_dummy_entity: bool = True, + self, + allow_cache: bool = False, + hide_dummy_entity: bool = True, ) -> List[FeatureView]: feature_views = [] for fv in self._registry.list_feature_views( @@ -266,7 +270,9 @@ def _list_feature_views( return feature_views def _list_stream_feature_views( - self, allow_cache: bool = False, hide_dummy_entity: bool = True, + self, + allow_cache: bool = False, + hide_dummy_entity: bool = True, ) -> List[StreamFeatureView]: stream_feature_views = [] for sfv in self._registry.list_stream_feature_views( @@ -480,7 +486,9 @@ def delete_feature_service(self, name: str): return self._registry.delete_feature_service(name, self.project) def _get_features( - self, features: Union[List[str], FeatureService], allow_cache: bool = False, + self, + features: Union[List[str], FeatureService], + allow_cache: bool = False, ) -> List[str]: _features = features @@ -589,7 +597,8 @@ def _make_inferences( feature_service.infer_features(fvs_to_update=fvs_to_update_map) def _get_feature_views_to_materialize( - self, feature_views: Optional[List[str]], + self, + feature_views: Optional[List[str]], ) -> List[FeatureView]: """ Returns the list of feature views that should be materialized. 
@@ -1069,7 +1078,8 @@ def get_historical_features( for feature_name in odfv_request_data_schema.keys(): if feature_name not in entity_df.columns: raise RequestDataNotFoundInEntityDfException( - feature_name=feature_name, feature_view_name=odfv.name, + feature_name=feature_name, + feature_view_name=odfv.name, ) _validate_feature_refs(_feature_refs, full_feature_names) @@ -1182,7 +1192,9 @@ def get_saved_dataset(self, name: str) -> SavedDataset: @log_exceptions_and_usage def materialize_incremental( - self, end_date: datetime, feature_views: Optional[List[str]] = None, + self, + end_date: datetime, + feature_views: Optional[List[str]] = None, ) -> None: """ Materialize incremental new data from the offline store into the online store. @@ -1264,7 +1276,10 @@ def tqdm_builder(length): ) self._registry.apply_materialization( - feature_view, self.project, start_date, end_date, + feature_view, + self.project, + start_date, + end_date, ) @log_exceptions_and_usage @@ -1336,7 +1351,10 @@ def tqdm_builder(length): ) self._registry.apply_materialization( - feature_view, self.project, start_date, end_date, + feature_view, + self.project, + start_date, + end_date, ) @log_exceptions_and_usage @@ -1439,8 +1457,8 @@ def write_to_offline_store( ) # Get columns of the batch source and the input dataframe. - column_names_and_types = feature_view.batch_source.get_table_column_names_and_types( - self.config + column_names_and_types = ( + feature_view.batch_source.get_table_column_names_and_types(self.config) ) source_columns = [column for column, _ in column_names_and_types] input_columns = df.columns.values.tolist() @@ -1701,12 +1719,17 @@ def _get_online_features( for table, requested_features in grouped_refs: # Get the correct set of entity values with the correct join keys. 
table_entity_values, idxs = self._get_unique_entities( - table, join_key_values, entity_name_to_join_key_map, + table, + join_key_values, + entity_name_to_join_key_map, ) # Fetch feature data for the minimum set of Entities. feature_data = self._read_from_online_store( - table_entity_values, provider, requested_features, table, + table_entity_values, + provider, + requested_features, + table, ) # Populate the result_rows with the Features from the OnlineStore inplace. @@ -1875,7 +1898,9 @@ def _get_unique_entities( """ # Get the correct set of entity values with the correct join keys. table_entity_values = self._get_table_entity_values( - table, entity_name_to_join_key_map, join_key_values, + table, + entity_name_to_join_key_map, + join_key_values, ) # Convert back to rowise. @@ -2060,7 +2085,8 @@ def _augment_response_with_on_demand_transforms( for odfv_name, _feature_refs in odfv_feature_refs.items(): odfv = requested_odfv_map[odfv_name] transformed_features_df = odfv.get_transformed_features_df( - initial_response_df, full_feature_names, + initial_response_df, + full_feature_names, ) selected_subset = [ f for f in transformed_features_df.columns if f in _feature_refs @@ -2117,9 +2143,7 @@ def _get_feature_views_to_use( features: Optional[Union[List[str], FeatureService]], allow_cache=False, hide_dummy_entity: bool = True, - ) -> Tuple[ - List[FeatureView], List[RequestFeatureView], List[OnDemandFeatureView], - ]: + ) -> Tuple[List[FeatureView], List[RequestFeatureView], List[OnDemandFeatureView]]: fvs = { fv.name: fv @@ -2364,10 +2388,10 @@ def get_validation_reference( self, name: str, allow_cache: bool = False ) -> ValidationReference: """ - Retrieves a validation reference. + Retrieves a validation reference. - Raises: - ValidationReferenceNotFoundException: The validation reference could not be found. + Raises: + ValidationReferenceNotFoundException: The validation reference could not be found. 
""" ref = self._registry.get_validation_reference( name, project=self.project, allow_cache=allow_cache diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index dd8cb4f0a6..0310376646 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -44,7 +44,10 @@ DUMMY_ENTITY_ID = "__dummy_id" DUMMY_ENTITY_NAME = "__dummy" DUMMY_ENTITY_VAL = "" -DUMMY_ENTITY = Entity(name=DUMMY_ENTITY_NAME, join_keys=[DUMMY_ENTITY_ID],) +DUMMY_ENTITY = Entity( + name=DUMMY_ENTITY_NAME, + join_keys=[DUMMY_ENTITY_ID], +) @typechecked diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py index d0b4274cd2..a3dc3732da 100644 --- a/sdk/python/feast/field.py +++ b/sdk/python/feast/field.py @@ -38,7 +38,11 @@ class Field: tags: Dict[str, str] def __init__( - self, *, name: str, dtype: FeastType, tags: Optional[Dict[str, str]] = None, + self, + *, + name: str, + dtype: FeastType, + tags: Optional[Dict[str, str]] = None, ): """ Creates a Field object. 
diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py index 011a3b99b2..0b8e42b4e9 100644 --- a/sdk/python/feast/inference.py +++ b/sdk/python/feast/inference.py @@ -144,7 +144,8 @@ def update_feature_views_with_inferred_features_and_entities( ): fv.entity_columns.append( Field( - name=entity.join_key, dtype=from_value_type(entity.value_type), + name=entity.join_key, + dtype=from_value_type(entity.value_type), ) ) @@ -166,7 +167,10 @@ def update_feature_views_with_inferred_features_and_entities( if run_inference_for_entities or run_inference_for_features: _infer_features_and_entities( - fv, join_keys, run_inference_for_features, config, + fv, + join_keys, + run_inference_for_features, + config, ) if not fv.features: @@ -177,7 +181,10 @@ def update_feature_views_with_inferred_features_and_entities( def _infer_features_and_entities( - fv: FeatureView, join_keys: Set[str], run_inference_for_features, config, + fv: FeatureView, + join_keys: Set[str], + run_inference_for_features, + config, ) -> None: """ Updates the specific feature in place with inferred features and entities. 
diff --git a/sdk/python/feast/infra/aws.py b/sdk/python/feast/infra/aws.py index 4109856e60..145c55952e 100644 --- a/sdk/python/feast/infra/aws.py +++ b/sdk/python/feast/infra/aws.py @@ -205,7 +205,10 @@ def _deploy_feature_server(self, project: str, image_uri: str): @log_exceptions_and_usage(provider="AwsProvider") def teardown_infra( - self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], + self, + project: str, + tables: Sequence[FeatureView], + entities: Sequence[Entity], ) -> None: super(AwsProvider, self).teardown_infra(project, tables, entities) diff --git a/sdk/python/feast/infra/materialization/batch_materialization_engine.py b/sdk/python/feast/infra/materialization/batch_materialization_engine.py index 773c685d6e..1890ffed5a 100644 --- a/sdk/python/feast/infra/materialization/batch_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/batch_materialization_engine.py @@ -96,7 +96,7 @@ def update( entities_to_keep: Sequence[Entity], ): """This method ensures that any necessary infrastructure or resources needed by the - engine are set up ahead of materialization.""" + engine are set up ahead of materialization.""" @abstractmethod def materialize( diff --git a/sdk/python/feast/infra/materialization/lambda/app.py b/sdk/python/feast/infra/materialization/lambda/app.py index ebed4c96e0..375674adaa 100644 --- a/sdk/python/feast/infra/materialization/lambda/app.py +++ b/sdk/python/feast/infra/materialization/lambda/app.py @@ -16,9 +16,9 @@ def handler(event, context): """Provide an event that contains the following keys: - - operation: one of the operations in the operations dict below - - tableName: required for operations that interact with DynamoDB - - payload: a parameter to pass to the operation being performed + - operation: one of the operations in the operations dict below + - tableName: required for operations that interact with DynamoDB + - payload: a parameter to pass to the operation being performed """ 
print("Received event: " + json.dumps(event, indent=2), flush=True) @@ -71,7 +71,10 @@ def handler(event, context): batch, feature_view, join_key_to_value_type ) store._provider.online_write_batch( - store.config, feature_view, rows_to_write, lambda x: None, + store.config, + feature_view, + rows_to_write, + lambda x: None, ) written_rows += len(rows_to_write) return {"written_rows": written_rows} diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 982045607d..6c2bef757a 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -134,7 +134,10 @@ def pull_latest_from_table_or_query( # When materializing a single feature view, we don't need full feature names. On demand transforms aren't materialized return BigQueryRetrievalJob( - query=query, client=client, config=config, full_feature_names=False, + query=query, + client=client, + config=config, + full_feature_names=False, ) @staticmethod @@ -164,7 +167,10 @@ def pull_all_from_table_or_query( WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') """ return BigQueryRetrievalJob( - query=query, client=client, config=config, full_feature_names=False, + query=query, + client=client, + config=config, + full_feature_names=False, ) @staticmethod @@ -195,20 +201,27 @@ def get_historical_features( config.offline_store.location, ) - entity_schema = _get_entity_schema(client=client, entity_df=entity_df,) + entity_schema = _get_entity_schema( + client=client, + entity_df=entity_df, + ) - entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( - entity_schema + entity_df_event_timestamp_col = ( + offline_utils.infer_event_timestamp_from_entity_df(entity_schema) ) entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( - entity_df, entity_df_event_timestamp_col, client, + entity_df, + entity_df_event_timestamp_col, + client, 
) @contextlib.contextmanager def query_generator() -> Iterator[str]: _upload_entity_df( - client=client, table_name=table_reference, entity_df=entity_df, + client=client, + table_name=table_reference, + entity_df=entity_df, ) expected_join_keys = offline_utils.get_expected_join_keys( @@ -598,7 +611,9 @@ def _get_table_reference_for_new_entity( def _upload_entity_df( - client: Client, table_name: str, entity_df: Union[pd.DataFrame, str], + client: Client, + table_name: str, + entity_df: Union[pd.DataFrame, str], ) -> Table: """Uploads a Pandas entity dataframe into a BigQuery table and returns the resulting table""" job: Union[bigquery.job.query.QueryJob, bigquery.job.load.LoadJob] diff --git a/sdk/python/feast/infra/offline_stores/bigquery_source.py b/sdk/python/feast/infra/offline_stores/bigquery_source.py index b06cc23369..bb8316869b 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery_source.py +++ b/sdk/python/feast/infra/offline_stores/bigquery_source.py @@ -204,7 +204,9 @@ class BigQueryOptions: """ def __init__( - self, table: Optional[str], query: Optional[str], + self, + table: Optional[str], + query: Optional[str], ): self.table = table or "" self.query = query or "" @@ -221,7 +223,8 @@ def from_proto(cls, bigquery_options_proto: DataSourceProto.BigQueryOptions): Returns a BigQueryOptions object based on the bigquery_options protobuf """ bigquery_options = cls( - table=bigquery_options_proto.table, query=bigquery_options_proto.query, + table=bigquery_options_proto.table, + query=bigquery_options_proto.query, ) return bigquery_options @@ -234,7 +237,8 @@ def to_proto(self) -> DataSourceProto.BigQueryOptions: BigQueryOptionsProto protobuf """ bigquery_options_proto = DataSourceProto.BigQueryOptions( - table=self.table, query=self.query, + table=self.table, + query=self.query, ) return bigquery_options_proto diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py 
b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index f2aa535c1d..415a46dde7 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -127,8 +127,8 @@ def query_generator() -> Iterator[str]: else: raise TypeError(entity_df) - entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( - entity_schema + entity_df_event_timestamp_col = ( + offline_utils.infer_event_timestamp_from_entity_df(entity_schema) ) expected_join_keys = offline_utils.get_expected_join_keys( @@ -140,7 +140,10 @@ def query_generator() -> Iterator[str]: ) entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( - entity_df, entity_df_event_timestamp_col, config, df_query, + entity_df, + entity_df_event_timestamp_col, + config, + df_query, ) query_context = offline_utils.get_feature_view_query_context( diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py index 6671a47765..c84fce03dc 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ -40,7 +40,10 @@ def postgres_container(): log_string_to_wait_for = "database system is ready to accept connections" waited = wait_for_logs( - container=container, predicate=log_string_to_wait_for, timeout=30, interval=10, + container=container, + predicate=log_string_to_wait_for, + timeout=30, + interval=10, ) logger.info("Waited for %s seconds until postgres container was up", waited) @@ -52,7 +55,9 @@ class PostgreSQLDataSourceCreator(DataSourceCreator, OnlineStoreCreator): def __init__( self, project_name: str, fixture_request: pytest.FixtureRequest, **kwargs ): - 
super().__init__(project_name,) + super().__init__( + project_name, + ) self.project_name = project_name self.container = fixture_request.getfixturevalue("postgres_container") diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index 2a0925d929..8e0badd732 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -130,13 +130,16 @@ def get_historical_features( tmp_entity_df_table_name = offline_utils.get_temp_entity_table_name() entity_schema = _get_entity_schema( - spark_session=spark_session, entity_df=entity_df, + spark_session=spark_session, + entity_df=entity_df, ) event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( entity_schema=entity_schema, ) entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( - entity_df, event_timestamp_col, spark_session, + entity_df, + event_timestamp_col, + spark_session, ) _upload_entity_df( spark_session=spark_session, diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py index ade1e54365..0ddeaad354 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py @@ -80,7 +80,10 @@ def __init__( RuntimeWarning, ) self.spark_options = SparkOptions( - table=table, query=query, path=path, file_format=file_format, + table=table, + query=query, + path=path, + file_format=file_format, ) @property @@ -304,7 +307,10 @@ def __init__( file_format: Optional[str] = None, ): self.spark_options = SparkOptions( - table=table, query=query, path=path, file_format=file_format, + table=table, + query=query, + path=path, + file_format=file_format, ) 
@staticmethod diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py index 9c73f01819..7d31aa90fb 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py @@ -7,6 +7,7 @@ FULL_REPO_CONFIGS = [ IntegrationTestRepoConfig( - provider="local", offline_store_creator=TrinoSourceCreator, + provider="local", + offline_store_creator=TrinoSourceCreator, ), ] diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py index f2b9f785a0..67efa6a27f 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py @@ -63,7 +63,10 @@ def __init__( ) self.exposed_port = self.container.get_exposed_port("8080") self.client = Trino( - user="user", catalog="memory", host="localhost", port=self.exposed_port, + user="user", + catalog="memory", + host="localhost", + port=self.exposed_port, ) def teardown(self): diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py index 87a99b820e..88a9021d1c 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py @@ -202,7 +202,10 @@ def pull_latest_from_table_or_query( # When materializing a single feature view, we don't need full feature names. 
On demand transforms aren't materialized return TrinoRetrievalJob( - query=query, client=client, config=config, full_feature_names=False, + query=query, + client=client, + config=config, + full_feature_names=False, ) @staticmethod @@ -240,8 +243,10 @@ def get_historical_features( connector=config.offline_store.connector, ) - entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( - entity_schema=entity_schema + entity_df_event_timestamp_col = ( + offline_utils.infer_event_timestamp_from_entity_df( + entity_schema=entity_schema + ) ) entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( @@ -327,11 +332,17 @@ def pull_all_from_table_or_query( WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' """ return TrinoRetrievalJob( - query=query, client=client, config=config, full_feature_names=False, + query=query, + client=client, + config=config, + full_feature_names=False, ) -def _get_table_reference_for_new_entity(catalog: str, dataset_name: str,) -> str: +def _get_table_reference_for_new_entity( + catalog: str, + dataset_name: str, +) -> str: """Gets the table_id for the new entity to be uploaded.""" table_name = offline_utils.get_temp_entity_table_name() return f"{catalog}.{dataset_name}.{table_name}" diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py index b559d0e59e..d82650712e 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py @@ -61,7 +61,8 @@ def from_proto(cls, trino_options_proto: DataSourceProto.TrinoOptions): Returns a TrinoOptions object based on the trino_options protobuf """ trino_options = cls( - table=trino_options_proto.table, query=trino_options_proto.query, + table=trino_options_proto.table, + 
query=trino_options_proto.query, ) return trino_options @@ -74,7 +75,8 @@ def to_proto(self) -> DataSourceProto.TrinoOptions: """ trino_options_proto = DataSourceProto.TrinoOptions( - table=self.table, query=self.query, + table=self.table, + query=self.query, ) return trino_options_proto diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index ae98f8d0c2..1af98c1437 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -88,7 +88,8 @@ def _to_arrow_internal(self): def persist(self, storage: SavedDatasetStorage): assert isinstance(storage, SavedDatasetFileStorage) filesystem, path = FileSource.create_filesystem_and_path( - storage.file_options.uri, storage.file_options.s3_endpoint_override, + storage.file_options.uri, + storage.file_options.s3_endpoint_override, ) if path.endswith(".parquet"): @@ -314,7 +315,9 @@ def evaluate_offline_job(): # TODO(kevjumba): remove try catch when fix is merged upstream in Dask. try: if created_timestamp_column: - source_df = source_df.sort_values(by=created_timestamp_column,) + source_df = source_df.sort_values( + by=created_timestamp_column, + ) source_df = source_df.sort_values(by=timestamp_field) @@ -352,7 +355,8 @@ def evaluate_offline_job(): # When materializing a single feature view, we don't need full feature names. 
On demand transforms aren't materialized return FileRetrievalJob( - evaluation_function=evaluate_offline_job, full_feature_names=False, + evaluation_function=evaluate_offline_job, + full_feature_names=False, ) @staticmethod @@ -394,7 +398,8 @@ def write_logged_features( data = pyarrow.parquet.read_table(data, use_threads=False, pre_buffer=False) filesystem, path = FileSource.create_filesystem_and_path( - destination.path, destination.s3_endpoint_override, + destination.path, + destination.s3_endpoint_override, ) pyarrow.dataset.write_dataset( @@ -453,7 +458,8 @@ def offline_write_batch( def _get_entity_df_event_timestamp_range( - entity_df: Union[pd.DataFrame, str], entity_df_event_timestamp_col: str, + entity_df: Union[pd.DataFrame, str], + entity_df_event_timestamp_col: str, ) -> Tuple[datetime, datetime]: if not isinstance(entity_df, pd.DataFrame): raise ValueError( @@ -483,7 +489,10 @@ def _read_datasource(data_source) -> dd.DataFrame: else None ) - return dd.read_parquet(data_source.path, storage_options=storage_options,) + return dd.read_parquet( + data_source.path, + storage_options=storage_options, + ) def _field_mapping( @@ -533,7 +542,8 @@ def _field_mapping( # Make sure to not have duplicated columns if entity_df_event_timestamp_col == timestamp_field: df_to_join = _run_dask_field_mapping( - df_to_join, {timestamp_field: f"__{timestamp_field}"}, + df_to_join, + {timestamp_field: f"__{timestamp_field}"}, ) timestamp_field = f"__{timestamp_field}" @@ -571,7 +581,9 @@ def _merge( def _normalize_timestamp( - df_to_join: dd.DataFrame, timestamp_field: str, created_timestamp_column: str, + df_to_join: dd.DataFrame, + timestamp_field: str, + created_timestamp_column: str, ) -> dd.DataFrame: df_to_join_types = df_to_join.dtypes timestamp_field_type = df_to_join_types[timestamp_field] @@ -645,14 +657,18 @@ def _drop_duplicates( df_to_join = df_to_join.persist() df_to_join = df_to_join.drop_duplicates( - all_join_keys + [entity_df_event_timestamp_col], 
keep="last", ignore_index=True, + all_join_keys + [entity_df_event_timestamp_col], + keep="last", + ignore_index=True, ) return df_to_join.persist() def _drop_columns( - df_to_join: dd.DataFrame, timestamp_field: str, created_timestamp_column: str, + df_to_join: dd.DataFrame, + timestamp_field: str, + created_timestamp_column: str, ) -> dd.DataFrame: entity_df_with_features = df_to_join.drop([timestamp_field], axis=1).persist() diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index e24317e859..c8a0cb8a5c 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -85,7 +85,8 @@ def to_df( for odfv in self.on_demand_feature_views: features_df = features_df.join( odfv.get_transformed_features_df( - features_df, self.full_feature_names, + features_df, + self.full_feature_names, ) ) @@ -129,7 +130,8 @@ def to_arrow( for odfv in self.on_demand_feature_views: features_df = features_df.join( odfv.get_transformed_features_df( - features_df, self.full_feature_names, + features_df, + self.full_feature_names, ) ) diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 1d7b79727e..df70f958f7 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -192,12 +192,15 @@ def get_historical_features( entity_df, redshift_client, config, s3_resource ) - entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( - entity_schema + entity_df_event_timestamp_col = ( + offline_utils.infer_event_timestamp_from_entity_df(entity_schema) ) entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( - entity_df, entity_df_event_timestamp_col, redshift_client, config, + entity_df, + entity_df_event_timestamp_col, + redshift_client, + config, ) @contextlib.contextmanager diff --git 
a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 71394c4403..0f4c6a7b52 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -224,12 +224,14 @@ def get_historical_features( entity_schema = _get_entity_schema(entity_df, snowflake_conn, config) - entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( - entity_schema + entity_df_event_timestamp_col = ( + offline_utils.infer_event_timestamp_from_entity_df(entity_schema) ) entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( - entity_df, entity_df_event_timestamp_col, snowflake_conn, + entity_df, + entity_df_event_timestamp_col, + snowflake_conn, ) @contextlib.contextmanager diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index b072c6e871..258fba71b1 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -360,4 +360,6 @@ def to_proto(self) -> LoggingConfigProto: ) def to_data_source(self) -> DataSource: - return SnowflakeSource(table=self.table_name,) + return SnowflakeSource( + table=self.table_name, + ) diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py index 81727067f5..e3999ab05a 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -112,7 +112,9 @@ def online_read( SELECT entity_key, feature_name, value, event_ts FROM {} WHERE entity_key = ANY(%s); """ - ).format(sql.Identifier(_table_id(project, table)),), + ).format( + sql.Identifier(_table_id(project, table)), + ), (keys,), ) @@ -228,7 +230,10 @@ def _drop_table_and_index(table_name): DROP TABLE IF EXISTS {}; DROP INDEX IF EXISTS {}; """ - 
).format(sql.Identifier(table_name), sql.Identifier(f"{table_name}_ek"),) + ).format( + sql.Identifier(table_name), + sql.Identifier(f"{table_name}_ek"), + ) def _to_naive_utc(ts: datetime): diff --git a/sdk/python/feast/infra/online_stores/datastore.py b/sdk/python/feast/infra/online_stores/datastore.py index fc3659ea1a..8c5989789e 100644 --- a/sdk/python/feast/infra/online_stores/datastore.py +++ b/sdk/python/feast/infra/online_stores/datastore.py @@ -197,7 +197,12 @@ def _write_minibatch( document_id = compute_entity_id(entity_key) key = client.key( - "Project", project, "Table", table.name, "Row", document_id, + "Project", + project, + "Table", + table.name, + "Row", + document_id, ) entity = datastore.Entity( @@ -318,7 +323,10 @@ def _initialize_client( project_id: Optional[str], namespace: Optional[str] ) -> datastore.Client: try: - client = datastore.Client(project=project_id, namespace=namespace,) + client = datastore.Client( + project=project_id, + namespace=namespace, + ) return client except DefaultCredentialsError as e: raise FeastProviderLoginError( @@ -394,7 +402,8 @@ def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any: @staticmethod def from_proto(datastore_table_proto: DatastoreTableProto) -> Any: datastore_table = DatastoreTable( - project=datastore_table_proto.project, name=datastore_table_proto.name, + project=datastore_table_proto.project, + name=datastore_table_proto.name, ) # Distinguish between null and empty string, since project_id and namespace are StringValues. 
diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index 6919f2cc29..530b078180 100644 --- a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -338,7 +338,8 @@ def _get_table_name( def _delete_table_idempotent( - dynamodb_resource, table_name: str, + dynamodb_resource, + table_name: str, ): try: table = dynamodb_resource.Table(table_name) @@ -399,7 +400,8 @@ def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any: @staticmethod def from_proto(dynamodb_table_proto: DynamoDBTableProto) -> Any: return DynamoDBTable( - name=dynamodb_table_proto.name, region=dynamodb_table_proto.region, + name=dynamodb_table_proto.name, + region=dynamodb_table_proto.region, ) def update(self): diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 2f0e902942..6689897d14 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -283,7 +283,10 @@ def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any: @staticmethod def from_proto(sqlite_table_proto: SqliteTableProto) -> Any: - return SqliteTable(path=sqlite_table_proto.path, name=sqlite_table_proto.name,) + return SqliteTable( + path=sqlite_table_proto.path, + name=sqlite_table_proto.name, + ) def update(self): self.conn.execute( diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index e31eb1e177..0b09f5df43 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -135,7 +135,10 @@ def update_infra( ) def teardown_infra( - self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], + self, + project: str, + tables: Sequence[FeatureView], + entities: Sequence[Entity], ) -> None: set_usage_attribute("provider", self.__class__.__name__) if 
self.online_store: @@ -187,7 +190,10 @@ def online_read( return result def ingest_df( - self, feature_view: FeatureView, entities: List[Entity], df: pd.DataFrame, + self, + feature_view: FeatureView, + entities: List[Entity], + df: pd.DataFrame, ): set_usage_attribute("provider", self.__class__.__name__) table = pa.Table.from_pandas(df) diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index 9695e4d736..086c9ec6b3 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -74,7 +74,10 @@ def plan_infra( @abc.abstractmethod def teardown_infra( - self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], + self, + project: str, + tables: Sequence[FeatureView], + entities: Sequence[Entity], ): """ Tear down all cloud resources for a repo. @@ -114,7 +117,10 @@ def online_write_batch( ... def ingest_df( - self, feature_view: FeatureView, entities: List[Entity], df: pd.DataFrame, + self, + feature_view: FeatureView, + entities: List[Entity], + df: pd.DataFrame, ): """ Ingests a DataFrame directly into the online store @@ -122,7 +128,9 @@ def ingest_df( pass def ingest_df_to_offline_store( - self, feature_view: FeatureView, df: pyarrow.Table, + self, + feature_view: FeatureView, + df: pyarrow.Table, ): """ Ingests a DataFrame directly into the offline store diff --git a/sdk/python/feast/infra/registry_stores/sql.py b/sdk/python/feast/infra/registry_stores/sql.py index 2d3ac9d683..9c6b47a714 100644 --- a/sdk/python/feast/infra/registry_stores/sql.py +++ b/sdk/python/feast/infra/registry_stores/sql.py @@ -473,7 +473,9 @@ def list_project_metadata( self, project: str, allow_cache: bool = False ) -> List[ProjectMetadata]: with self.engine.connect() as conn: - stmt = select(feast_metadata).where(feast_metadata.c.project_id == project,) + stmt = select(feast_metadata).where( + feast_metadata.c.project_id == project, + ) rows = conn.execute(stmt).all() if rows: project_metadata = 
ProjectMetadata(project_name=project) @@ -486,7 +488,10 @@ def list_project_metadata( return [] def apply_saved_dataset( - self, saved_dataset: SavedDataset, project: str, commit: bool = True, + self, + saved_dataset: SavedDataset, + project: str, + commit: bool = True, ): return self._apply_object( saved_datasets, @@ -594,7 +599,9 @@ def apply_user_metadata( getattr(table.c, "feature_view_name") == name, table.c.project_id == project, ) - .values(values,) + .values( + values, + ) ) conn.execute(update_stmt) else: @@ -699,7 +706,9 @@ def _apply_object( update_stmt = ( update(table) .where(getattr(table.c, id_field_name) == name) - .values(values,) + .values( + values, + ) ) conn.execute(update_stmt) else: @@ -709,7 +718,9 @@ def _apply_object( "last_updated_timestamp": update_time, "project_id": project, } - insert_stmt = insert(table).values(values,) + insert_stmt = insert(table).values( + values, + ) conn.execute(insert_stmt) self._set_last_updated_metadata(update_datetime, project) @@ -818,7 +829,9 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): ) conn.execute(update_stmt) else: - insert_stmt = insert(feast_metadata).values(values,) + insert_stmt = insert(feast_metadata).values( + values, + ) conn.execute(insert_stmt) def _get_last_updated_metadata(self, project: str): diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index 51aecbf8a7..3c8ad9d71b 100644 --- a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -89,7 +89,10 @@ def execute_redshift_statement_async( """ try: return redshift_data_client.execute_statement( - ClusterIdentifier=cluster_id, Database=database, DbUser=user, Sql=query, + ClusterIdentifier=cluster_id, + Database=database, + DbUser=user, + Sql=query, ) except ClientError as e: if e.response["Error"]["Code"] == "ValidationException": @@ -157,7 +160,11 @@ def get_redshift_statement_result(redshift_data_client, statement_id: 
str) -> di return redshift_data_client.get_statement_result(Id=statement_id) -def upload_df_to_s3(s3_resource, s3_path: str, df: pd.DataFrame,) -> None: +def upload_df_to_s3( + s3_resource, + s3_path: str, + df: pd.DataFrame, +) -> None: """Uploads a Pandas DataFrame to S3 as a parquet file Args: @@ -236,11 +243,19 @@ def upload_df_to_redshift( def delete_redshift_table( - redshift_data_client, cluster_id: str, database: str, user: str, table_name: str, + redshift_data_client, + cluster_id: str, + database: str, + user: str, + table_name: str, ): drop_query = f"DROP {table_name} IF EXISTS" execute_redshift_statement( - redshift_data_client, cluster_id, database, user, drop_query, + redshift_data_client, + cluster_id, + database, + user, + drop_query, ) @@ -376,7 +391,11 @@ def temporarily_upload_df_to_redshift( # Clean up the uploaded Redshift table execute_redshift_statement( - redshift_data_client, cluster_id, database, user, f"DROP TABLE {table_name}", + redshift_data_client, + cluster_id, + database, + user, + f"DROP TABLE {table_name}", ) @@ -423,7 +442,11 @@ def temporarily_upload_arrow_table_to_redshift( # Clean up the uploaded Redshift table execute_redshift_statement( - redshift_data_client, cluster_id, database, user, f"DROP TABLE {table_name}", + redshift_data_client, + cluster_id, + database, + user, + f"DROP TABLE {table_name}", ) @@ -491,7 +514,13 @@ def unload_redshift_query_to_pa( bucket, key = get_bucket_and_key(s3_path) execute_redshift_query_and_unload_to_s3( - redshift_data_client, cluster_id, database, user, s3_path, iam_role, query, + redshift_data_client, + cluster_id, + database, + user, + s3_path, + iam_role, + query, ) with tempfile.TemporaryDirectory() as temp_dir: diff --git a/sdk/python/feast/infra/utils/postgres/connection_utils.py b/sdk/python/feast/infra/utils/postgres/connection_utils.py index 6dbb4a4bc0..0e9cbf96fe 100644 --- a/sdk/python/feast/infra/utils/postgres/connection_utils.py +++ 
b/sdk/python/feast/infra/utils/postgres/connection_utils.py @@ -64,5 +64,8 @@ def get_query_schema(config: PostgreSQLConfig, sql_query: str) -> Dict[str, np.d """ with _get_conn(config) as conn: conn.set_session(readonly=True) - df = pd.read_sql(f"SELECT * FROM {sql_query} LIMIT 0", conn,) + df = pd.read_sql( + f"SELECT * FROM {sql_query} LIMIT 0", + conn, + ) return dict(zip(df.columns, df.dtypes)) diff --git a/sdk/python/feast/infra/utils/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake_utils.py index 05834ae436..78d505bd08 100644 --- a/sdk/python/feast/infra/utils/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake_utils.py @@ -342,7 +342,10 @@ def upload_df( def upload_local_pq( - path: Path, cursor: SnowflakeCursor, stage_name: str, parallel: int = 4, + path: Path, + cursor: SnowflakeCursor, + stage_name: str, + parallel: int = 4, ): """ Args: diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index bad4edba81..b4c136ab13 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -295,7 +295,10 @@ def to_proto(self) -> OnDemandFeatureViewProto: sources[source_name] = OnDemandSource( feature_view_projection=fv_projection.to_proto() ) - for (source_name, request_sources,) in self.source_request_sources.items(): + for ( + source_name, + request_sources, + ) in self.source_request_sources.items(): sources[source_name] = OnDemandSource( request_data_source=request_sources.to_proto() ) @@ -305,7 +308,8 @@ def to_proto(self) -> OnDemandFeatureViewProto: features=[feature.to_proto() for feature in self.features], sources=sources, user_defined_function=UserDefinedFunctionProto( - name=self.udf.__name__, body=dill.dumps(self.udf, recurse=True), + name=self.udf.__name__, + body=dill.dumps(self.udf, recurse=True), ), description=self.description, tags=self.tags, @@ -326,7 +330,10 @@ def from_proto(cls, on_demand_feature_view_proto: 
OnDemandFeatureViewProto): A OnDemandFeatureView object based on the on-demand feature view protobuf. """ sources = [] - for (_, on_demand_source,) in on_demand_feature_view_proto.spec.sources.items(): + for ( + _, + on_demand_source, + ) in on_demand_feature_view_proto.spec.sources.items(): if on_demand_source.WhichOneof("source") == "feature_view": sources.append( FeatureView.from_proto(on_demand_source.feature_view).projection @@ -393,7 +400,9 @@ def get_request_data_schema(self) -> Dict[str, ValueType]: return schema def get_transformed_features_df( - self, df_with_features: pd.DataFrame, full_feature_names: bool = False, + self, + df_with_features: pd.DataFrame, + full_feature_names: bool = False, ) -> pd.DataFrame: # Apply on demand transformations columns_to_cleanup = [] diff --git a/sdk/python/feast/registry.py b/sdk/python/feast/registry.py index f72fd717d2..c501a42c18 100644 --- a/sdk/python/feast/registry.py +++ b/sdk/python/feast/registry.py @@ -491,7 +491,10 @@ def apply_materialization( # Saved dataset operations @abstractmethod def apply_saved_dataset( - self, saved_dataset: SavedDataset, project: str, commit: bool = True, + self, + saved_dataset: SavedDataset, + project: str, + commit: bool = True, ): """ Stores a saved dataset metadata with Feast @@ -581,17 +584,17 @@ def get_validation_reference( self, name: str, project: str, allow_cache: bool = False ) -> ValidationReference: """ - Retrieves a validation reference. + Retrieves a validation reference. 
- Args: - name: Name of dataset - project: Feast project that this dataset belongs to - allow_cache: Whether to allow returning this dataset from a cached registry + Args: + name: Name of dataset + project: Feast project that this dataset belongs to + allow_cache: Whether to allow returning this dataset from a cached registry - Returns: - Returns either the specified ValidationReference, or raises an exception if - none is found - """ + Returns: + Returns either the specified ValidationReference, or raises an exception if + none is found + """ # TODO: Needs to be implemented. def list_validation_references( @@ -1574,7 +1577,10 @@ def delete_entity(self, name: str, project: str, commit: bool = True): raise EntityNotFoundException(name, project) def apply_saved_dataset( - self, saved_dataset: SavedDataset, project: str, commit: bool = True, + self, + saved_dataset: SavedDataset, + project: str, + commit: bool = True, ): """ Stores a saved dataset metadata with Feast @@ -1692,17 +1698,17 @@ def get_validation_reference( self, name: str, project: str, allow_cache: bool = False ) -> ValidationReference: """ - Retrieves a validation reference. + Retrieves a validation reference. 
- Args: - name: Name of dataset - project: Feast project that this dataset belongs to - allow_cache: Whether to allow returning this dataset from a cached registry + Args: + name: Name of dataset + project: Feast project that this dataset belongs to + allow_cache: Whether to allow returning this dataset from a cached registry - Returns: - Returns either the specified ValidationReference, or raises an exception if - none is found - """ + Returns: + Returns either the specified ValidationReference, or raises an exception if + none is found + """ registry_proto = self._get_registry_proto( project=project, allow_cache=allow_cache ) diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index f7f564df6f..37e2cf95e5 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -272,7 +272,8 @@ def _validate_online_store_config(cls, values): online_config_class(**values["online_store"]) except ValidationError as e: raise ValidationError( - [ErrorWrapper(e, loc="online_store")], model=RepoConfig, + [ErrorWrapper(e, loc="online_store")], + model=RepoConfig, ) return values @@ -306,7 +307,8 @@ def _validate_offline_store_config(cls, values): offline_config_class(**values["offline_store"]) except ValidationError as e: raise ValidationError( - [ErrorWrapper(e, loc="offline_store")], model=RepoConfig, + [ErrorWrapper(e, loc="offline_store")], + model=RepoConfig, ) return values @@ -340,7 +342,8 @@ def _validate_feature_server_config(cls, values): feature_server_config_class(**values["feature_server"]) except ValidationError as e: raise ValidationError( - [ErrorWrapper(e, loc="feature_server")], model=RepoConfig, + [ErrorWrapper(e, loc="feature_server")], + model=RepoConfig, ) return values @@ -377,7 +380,12 @@ def write_to_path(self, repo_path: Path): config_path = repo_path / "feature_store.yaml" with open(config_path, mode="w") as f: yaml.dump( - yaml.safe_load(self.json(exclude={"repo_path"}, exclude_unset=True,)), + 
yaml.safe_load( + self.json( + exclude={"repo_path"}, + exclude_unset=True, + ) + ), f, sort_keys=False, ) diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index f19b1fcff7..29e8abb7da 100644 --- a/sdk/python/feast/stream_feature_view.py +++ b/sdk/python/feast/stream_feature_view.py @@ -179,7 +179,8 @@ def to_proto(self): udf_proto = None if self.udf: udf_proto = UserDefinedFunctionProto( - name=self.udf.__name__, body=dill.dumps(self.udf, recurse=True), + name=self.udf.__name__, + body=dill.dumps(self.udf, recurse=True), ) spec = StreamFeatureViewSpecProto( name=self.name, diff --git a/sdk/python/feast/templates/aws/test.py b/sdk/python/feast/templates/aws/test.py index 07410954f7..3d223e8f26 100644 --- a/sdk/python/feast/templates/aws/test.py +++ b/sdk/python/feast/templates/aws/test.py @@ -54,7 +54,8 @@ def main(): # Retrieve features from the online store (Firestore) online_features = fs.get_online_features( - features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + features=features, + entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], ).to_dict() print() diff --git a/sdk/python/feast/templates/gcp/test.py b/sdk/python/feast/templates/gcp/test.py index 538334044b..8ff11bda5c 100644 --- a/sdk/python/feast/templates/gcp/test.py +++ b/sdk/python/feast/templates/gcp/test.py @@ -54,7 +54,8 @@ def main(): # Retrieve features from the online store (Firestore) online_features = fs.get_online_features( - features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + features=features, + entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], ).to_dict() print() diff --git a/sdk/python/feast/templates/postgres/driver_repo.py b/sdk/python/feast/templates/postgres/driver_repo.py index 4096943bb7..61e32eb58e 100644 --- a/sdk/python/feast/templates/postgres/driver_repo.py +++ b/sdk/python/feast/templates/postgres/driver_repo.py @@ -6,7 +6,10 @@ ) from feast.types import Float32, Int64 
-driver = Entity(name="driver_id", join_keys=["driver_id"],) +driver = Entity( + name="driver_id", + join_keys=["driver_id"], +) driver_stats_source = PostgreSQLSource( diff --git a/sdk/python/feast/templates/postgres/test.py b/sdk/python/feast/templates/postgres/test.py index 81ac299698..d547bc8c64 100644 --- a/sdk/python/feast/templates/postgres/test.py +++ b/sdk/python/feast/templates/postgres/test.py @@ -52,7 +52,8 @@ def main(): # Retrieve features from the online store online_features = fs.get_online_features( - features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + features=features, + entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], ).to_dict() print() diff --git a/sdk/python/feast/templates/snowflake/test.py b/sdk/python/feast/templates/snowflake/test.py index 32aa6380d5..3c33f6aefd 100644 --- a/sdk/python/feast/templates/snowflake/test.py +++ b/sdk/python/feast/templates/snowflake/test.py @@ -54,7 +54,8 @@ def main(): # Retrieve features from the online store online_features = fs.get_online_features( - features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + features=features, + entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], ).to_dict() print() diff --git a/sdk/python/feast/templates/spark/example.py b/sdk/python/feast/templates/spark/example.py index d006353118..8ad48f53fc 100644 --- a/sdk/python/feast/templates/spark/example.py +++ b/sdk/python/feast/templates/spark/example.py @@ -16,8 +16,14 @@ # Entity definitions -driver = Entity(name="driver", description="driver id",) -customer = Entity(name="customer", description="customer id",) +driver = Entity( + name="driver", + description="driver id", +) +customer = Entity( + name="customer", + description="customer id", +) # Sources driver_hourly_stats = SparkSource( diff --git a/sdk/python/feast/types.py b/sdk/python/feast/types.py index 40c1d62e7d..0ba1725f17 100644 --- a/sdk/python/feast/types.py +++ b/sdk/python/feast/types.py @@ -178,7 +178,9 @@ 
def __str__(self): } -def from_value_type(value_type: ValueType,) -> FeastType: +def from_value_type( + value_type: ValueType, +) -> FeastType: """ Converts a ValueType enum to a Feast type. diff --git a/sdk/python/feast/ui_server.py b/sdk/python/feast/ui_server.py index cb275c8f91..4d1fd67dc1 100644 --- a/sdk/python/feast/ui_server.py +++ b/sdk/python/feast/ui_server.py @@ -83,7 +83,9 @@ def catch_all(): return Response(content, media_type="text/html") app.mount( - "/", StaticFiles(directory=ui_dir, html=True), name="site", + "/", + StaticFiles(directory=ui_dir, html=True), + name="site", ) return app diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index af22fbca3f..1b99934159 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -151,7 +151,8 @@ def _get_column_names( def _run_pyarrow_field_mapping( - table: pyarrow.Table, field_mapping: Dict[str, str], + table: pyarrow.Table, + field_mapping: Dict[str, str], ) -> pyarrow.Table: # run field mapping in the forward direction cols = table.column_names @@ -163,7 +164,8 @@ def _run_pyarrow_field_mapping( def _run_dask_field_mapping( - table: dd.DataFrame, field_mapping: Dict[str, str], + table: dd.DataFrame, + field_mapping: Dict[str, str], ): if field_mapping: # run field mapping in the forward direction diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 9e47356000..03bc8b9a28 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -33,9 +33,7 @@ anyio==3.6.1 # starlette # watchfiles appdirs==1.4.4 - # via - # black - # fissix + # via fissix appnope==0.1.3 # via ipython asn1crypto==1.5.1 @@ -53,7 +51,6 @@ async-timeout==4.0.2 attrs==21.4.0 # via # aiohttp - # black # bowler # jsonschema # pytest @@ -75,7 +72,7 @@ babel==2.10.3 # via sphinx backcall==0.2.0 # via ipython -black==19.10b0 +black==22.6.0 # via feast (setup.py) boto3==1.20.23 # 
via @@ -115,7 +112,7 @@ charset-normalizer==2.0.12 # aiohttp # requests # snowflake-connector-python -click==8.0.1 +click==8.1.3 # via # black # bowler @@ -383,7 +380,9 @@ mypy==0.961 # feast (setup.py) # sqlalchemy mypy-extensions==0.4.3 - # via mypy + # via + # black + # mypy mypy-protobuf==3.1 # via feast (setup.py) mysqlclient==2.1.1 @@ -445,7 +444,9 @@ pickleshare==0.7.5 pip-tools==6.8.0 # via feast (setup.py) platformdirs==2.5.2 - # via virtualenv + # via + # black + # virtualenv pluggy==1.0.0 # via pytest ply==3.11 @@ -597,8 +598,6 @@ pyyaml==6.0 # uvicorn redis==4.2.2 # via feast (setup.py) -regex==2022.7.9 - # via black requests==2.28.1 # via # adal @@ -695,11 +694,11 @@ thriftpy2==0.4.14 # via happybase toml==0.10.2 # via - # black # feast (setup.py) # pre-commit tomli==2.0.1 # via + # black # build # coverage # mypy @@ -722,8 +721,6 @@ traitlets==5.3.0 # nbformat trino==0.314.0 # via feast (setup.py) -typed-ast==1.5.4 - # via black typeguard==2.13.3 # via feast (setup.py) types-protobuf==3.19.22 diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 0440a171ed..115a627341 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -24,7 +24,7 @@ certifi==2022.6.15 # via requests charset-normalizer==2.1.0 # via requests -click==8.0.1 +click==8.1.3 # via # bowler # feast (setup.py) @@ -113,7 +113,7 @@ protobuf==3.20.1 # grpcio-reflection # proto-plus # tensorflow-metadata -pyarrow==6.0.1 +pyarrow==8.0.0 # via feast (setup.py) pyasn1==0.4.8 # via diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 67db3a978f..d25c433bac 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --extra=ci --output-file=sdk/python/requirements/py3.8-ci-requirements.txt # -absl-py==1.1.0 
+absl-py==1.2.0 # via tensorflow-metadata adal==1.2.7 # via @@ -33,9 +33,7 @@ anyio==3.6.1 # starlette # watchfiles appdirs==1.4.4 - # via - # black - # fissix + # via fissix appnope==0.1.3 # via ipython asn1crypto==1.5.1 @@ -53,7 +51,6 @@ async-timeout==4.0.2 attrs==21.4.0 # via # aiohttp - # black # bowler # jsonschema # pytest @@ -79,7 +76,7 @@ backports-zoneinfo==0.2.1 # via # pytz-deprecation-shim # tzlocal -black==19.10b0 +black==22.6.0 # via feast (setup.py) boto3==1.20.23 # via @@ -119,7 +116,7 @@ charset-normalizer==2.0.12 # aiohttp # requests # snowflake-connector-python -click==8.0.1 +click==8.1.3 # via # black # bowler @@ -134,7 +131,7 @@ colorama==0.4.5 # via # feast (setup.py) # great-expectations -coverage[toml]==6.4.1 +coverage[toml]==6.4.2 # via pytest-cov cryptography==35.0.0 # via @@ -163,7 +160,7 @@ deprecation==2.1.0 # via testcontainers dill==0.3.5.1 # via feast (setup.py) -distlib==0.3.4 +distlib==0.3.5 # via virtualenv docker==5.0.3 # via @@ -179,13 +176,13 @@ execnet==1.9.0 # via pytest-xdist executing==0.8.3 # via stack-data -fastapi==0.78.0 +fastapi==0.79.0 # via feast (setup.py) fastavro==1.5.2 # via # feast (setup.py) # pandavro -fastjsonschema==2.15.3 +fastjsonschema==2.16.1 # via nbformat filelock==3.7.1 # via virtualenv @@ -218,9 +215,9 @@ google-api-core[grpc]==2.8.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.52.0 +google-api-python-client==2.53.0 # via firebase-admin -google-auth==2.9.0 +google-auth==2.9.1 # via # gcsfs # google-api-core @@ -235,19 +232,19 @@ google-auth-oauthlib==0.5.2 # via gcsfs google-cloud-bigquery[pandas]==3.2.0 # via feast (setup.py) -google-cloud-bigquery-storage==2.14.0 +google-cloud-bigquery-storage==2.14.1 # via # feast (setup.py) # google-cloud-bigquery -google-cloud-core==2.3.1 +google-cloud-core==2.3.2 # via # google-cloud-bigquery # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.7.2 
+google-cloud-datastore==2.8.0 # via feast (setup.py) -google-cloud-firestore==2.5.3 +google-cloud-firestore==2.6.0 # via firebase-admin google-cloud-storage==2.4.0 # via @@ -260,7 +257,7 @@ google-resumable-media==2.3.3 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos==1.56.3 +googleapis-common-protos==1.56.4 # via # feast (setup.py) # google-api-core @@ -336,7 +333,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.7.1 +jsonschema==4.7.2 # via # altair # feast (setup.py) @@ -356,7 +353,7 @@ mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==2.0.3 +mistune==2.0.4 # via great-expectations mmh3==3.0.0 # via feast (setup.py) @@ -389,7 +386,9 @@ mypy==0.961 # feast (setup.py) # sqlalchemy mypy-extensions==0.4.3 - # via mypy + # via + # black + # mypy mypy-protobuf==3.1 # via feast (setup.py) mysqlclient==2.1.1 @@ -451,7 +450,9 @@ pickleshare==0.7.5 pip-tools==6.8.0 # via feast (setup.py) platformdirs==2.5.2 - # via virtualenv + # via + # black + # virtualenv pluggy==1.0.0 # via pytest ply==3.11 @@ -603,8 +604,6 @@ pyyaml==6.0 # uvicorn redis==4.2.2 # via feast (setup.py) -regex==2022.7.9 - # via black requests==2.28.1 # via # adal @@ -703,11 +702,11 @@ thriftpy2==0.4.14 # via happybase toml==0.10.2 # via - # black # feast (setup.py) # pre-commit tomli==2.0.1 # via + # black # build # coverage # mypy @@ -730,8 +729,6 @@ traitlets==5.3.0 # nbformat trino==0.314.0 # via feast (setup.py) -typed-ast==1.5.4 - # via black typeguard==2.13.3 # via feast (setup.py) types-protobuf==3.19.22 @@ -742,13 +739,13 @@ types-python-dateutil==2.8.18 # via feast (setup.py) types-pytz==2022.1.1 # via feast (setup.py) -types-pyyaml==6.0.9 +types-pyyaml==6.0.10 # via feast (setup.py) -types-redis==4.3.3 +types-redis==4.3.4 # via feast (setup.py) -types-requests==2.28.0 +types-requests==2.28.2 # via feast (setup.py) -types-setuptools==62.6.1 +types-setuptools==63.2.0 # via feast (setup.py) 
types-tabulate==0.8.11 # via feast (setup.py) @@ -758,6 +755,7 @@ typing-extensions==4.3.0 # via # aioitertools # azure-core + # black # great-expectations # mypy # pydantic @@ -806,7 +804,7 @@ xmltodict==0.13.0 # via moto yarl==1.7.2 # via aiohttp -zipp==3.8.0 +zipp==3.8.1 # via # importlib-metadata # importlib-resources diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index a09289f662..3de6ae7e9e 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=sdk/python/requirements/py3.8-requirements.txt # -absl-py==1.1.0 +absl-py==1.2.0 # via tensorflow-metadata anyio==3.6.1 # via @@ -24,7 +24,7 @@ certifi==2022.6.15 # via requests charset-normalizer==2.1.0 # via requests -click==8.0.1 +click==8.1.3 # via # bowler # feast (setup.py) @@ -38,7 +38,7 @@ dask==2022.1.1 # via feast (setup.py) dill==0.3.5.1 # via feast (setup.py) -fastapi==0.78.0 +fastapi==0.79.0 # via feast (setup.py) fastavro==1.5.2 # via @@ -50,9 +50,9 @@ fsspec==2022.5.0 # via dask google-api-core==2.8.2 # via feast (setup.py) -google-auth==2.9.0 +google-auth==2.9.1 # via google-api-core -googleapis-common-protos==1.56.3 +googleapis-common-protos==1.56.4 # via # feast (setup.py) # google-api-core @@ -75,7 +75,7 @@ importlib-resources==5.8.0 # via jsonschema jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.7.1 +jsonschema==4.7.2 # via feast (setup.py) locket==1.0.0 # via partd @@ -115,7 +115,7 @@ protobuf==3.20.1 # grpcio-reflection # proto-plus # tensorflow-metadata -pyarrow==6.0.1 +pyarrow==8.0.0 # via feast (setup.py) pyasn1==0.4.8 # via @@ -198,5 +198,5 @@ watchfiles==0.15.0 # via uvicorn websockets==10.3 # via uvicorn -zipp==3.8.0 +zipp==3.8.1 # via importlib-resources diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 8be0c36052..2706348d41 100644 --- 
a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -33,9 +33,7 @@ anyio==3.6.1 # starlette # watchfiles appdirs==1.4.4 - # via - # black - # fissix + # via fissix appnope==0.1.3 # via ipython asn1crypto==1.5.1 @@ -53,7 +51,6 @@ async-timeout==4.0.2 attrs==21.4.0 # via # aiohttp - # black # bowler # jsonschema # pytest @@ -75,7 +72,7 @@ babel==2.10.3 # via sphinx backcall==0.2.0 # via ipython -black==19.10b0 +black==22.6.0 # via feast (setup.py) boto3==1.20.23 # via @@ -115,7 +112,7 @@ charset-normalizer==2.0.12 # aiohttp # requests # snowflake-connector-python -click==8.0.1 +click==8.1.3 # via # black # bowler @@ -383,7 +380,9 @@ mypy==0.961 # feast (setup.py) # sqlalchemy mypy-extensions==0.4.3 - # via mypy + # via + # black + # mypy mypy-protobuf==3.1 # via feast (setup.py) mysqlclient==2.1.1 @@ -445,7 +444,9 @@ pickleshare==0.7.5 pip-tools==6.8.0 # via feast (setup.py) platformdirs==2.5.2 - # via virtualenv + # via + # black + # virtualenv pluggy==1.0.0 # via pytest ply==3.11 @@ -597,8 +598,6 @@ pyyaml==6.0 # uvicorn redis==4.2.2 # via feast (setup.py) -regex==2022.7.9 - # via black requests==2.28.1 # via # adal @@ -697,11 +696,11 @@ thriftpy2==0.4.14 # via happybase toml==0.10.2 # via - # black # feast (setup.py) # pre-commit tomli==2.0.1 # via + # black # build # coverage # mypy @@ -724,8 +723,6 @@ traitlets==5.3.0 # nbformat trino==0.314.0 # via feast (setup.py) -typed-ast==1.5.4 - # via black typeguard==2.13.3 # via feast (setup.py) types-protobuf==3.19.22 @@ -752,6 +749,7 @@ typing-extensions==4.3.0 # via # aioitertools # azure-core + # black # great-expectations # mypy # pydantic diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 91e6602083..d015b3aa1c 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -24,7 +24,7 @@ certifi==2022.6.15 # via requests 
charset-normalizer==2.1.0 # via requests -click==8.0.1 +click==8.1.3 # via # bowler # feast (setup.py) @@ -113,7 +113,7 @@ protobuf==3.20.1 # grpcio-reflection # proto-plus # tensorflow-metadata -pyarrow==6.0.1 +pyarrow==8.0.0 # via feast (setup.py) pyasn1==0.4.8 # via diff --git a/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py b/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py index 6e22c93e5f..03070887c4 100644 --- a/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py +++ b/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py @@ -60,5 +60,7 @@ def test_online_retrieval(environment, universal_data_sources, benchmark): unprefixed_feature_refs.remove("conv_rate_plus_val_to_add") benchmark( - fs.get_online_features, features=feature_refs, entity_rows=entity_rows, + fs.get_online_features, + features=feature_refs, + entity_rows=entity_rows, ) diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index ac30149cfa..e296aeedbd 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -92,7 +92,10 @@ def pytest_addoption(parser): help="Run tests with external dependencies", ) parser.addoption( - "--benchmark", action="store_true", default=False, help="Run benchmark tests", + "--benchmark", + action="store_true", + default=False, + help="Run benchmark tests", ) parser.addoption( "--goserver", @@ -334,7 +337,8 @@ def feature_server_endpoint(environment): proc.start() # Wait for server to start wait_retry_backoff( - lambda: (None, _check_port_open("localhost", port)), timeout_secs=10, + lambda: (None, _check_port_open("localhost", port)), + timeout_secs=10, ) yield f"http://localhost:{port}" @@ -372,7 +376,9 @@ def universal_data_sources(environment) -> TestData: def e2e_data_sources(environment: Environment): df = create_basic_driver_dataset() data_source = environment.data_source_creator.create_data_source( - df, 
environment.feature_store.project, field_mapping={"ts_1": "ts"}, + df, + environment.feature_store.project, + field_mapping={"ts_1": "ts"}, ) return df, data_source diff --git a/sdk/python/tests/doctest/test_all.py b/sdk/python/tests/doctest/test_all.py index 31f181ad53..0412e34c36 100644 --- a/sdk/python/tests/doctest/test_all.py +++ b/sdk/python/tests/doctest/test_all.py @@ -17,7 +17,10 @@ def setup_feature_store(): init_repo("feature_repo", "local") fs = FeatureStore(repo_path="feature_repo") - driver = Entity(name="driver_id", description="driver id",) + driver = Entity( + name="driver_id", + description="driver id", + ) driver_hourly_stats = FileSource( path="feature_repo/data/driver_stats.parquet", timestamp_field="event_timestamp", @@ -88,7 +91,8 @@ def test_docstrings(): setup_function() test_suite = doctest.DocTestSuite( - temp_module, optionflags=doctest.ELLIPSIS, + temp_module, + optionflags=doctest.ELLIPSIS, ) if test_suite.countTestCases() > 0: result = unittest.TextTestRunner(sys.stdout).run(test_suite) diff --git a/sdk/python/tests/example_repos/example_feature_repo_1.py b/sdk/python/tests/example_repos/example_feature_repo_1.py index 8d6d96d9ef..5abd9fb18a 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_1.py +++ b/sdk/python/tests/example_repos/example_feature_repo_1.py @@ -28,11 +28,13 @@ ) customer_driver_combined_source = BigQuerySource( - table="feast-oss.public.customer_driver", timestamp_field="event_timestamp", + table="feast-oss.public.customer_driver", + timestamp_field="event_timestamp", ) driver_locations_push_source = PushSource( - name="driver_locations_push", batch_source=driver_locations_source, + name="driver_locations_push", + batch_source=driver_locations_source, ) driver = Entity( diff --git a/sdk/python/tests/example_repos/example_feature_repo_2.py b/sdk/python/tests/example_repos/example_feature_repo_2.py index 073c48c1c1..21476e3779 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_2.py +++ 
b/sdk/python/tests/example_repos/example_feature_repo_2.py @@ -9,7 +9,10 @@ created_timestamp_column="created", ) -driver = Entity(name="driver_id", description="driver id",) +driver = Entity( + name="driver_id", + description="driver id", +) driver_hourly_stats_view = FeatureView( diff --git a/sdk/python/tests/example_repos/example_feature_repo_version_0_19.py b/sdk/python/tests/example_repos/example_feature_repo_version_0_19.py index a65c031cea..68681794f9 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_version_0_19.py +++ b/sdk/python/tests/example_repos/example_feature_repo_version_0_19.py @@ -57,7 +57,8 @@ request_source = RequestDataSource( - name="conv_rate_input", schema={"val_to_add": ValueType.INT64}, + name="conv_rate_input", + schema={"val_to_add": ValueType.INT64}, ) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py b/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py index 4b079999ed..77b435ecc9 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py +++ b/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py @@ -6,7 +6,11 @@ path="driver_stats.parquet", # this parquet is not real and will not be read ) -driver = Entity(name="driver_id", description="driver id", join_keys=["driver"],) +driver = Entity( + name="driver_id", + description="driver id", + join_keys=["driver"], +) driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", # Intentionally use the same FeatureView name diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py b/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py index 0663150531..c30b933eaf 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py +++ b/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py @@ -11,7 +11,11 @@ # 
The join key here is deliberately different from the parquet file to test the failure path. -driver = Entity(name="driver_id", description="driver id", join_keys=["driver"],) +driver = Entity( + name="driver_id", + description="driver id", + join_keys=["driver"], +) driver_hourly_stats_view = FeatureView( diff --git a/sdk/python/tests/example_repos/on_demand_feature_view_repo.py b/sdk/python/tests/example_repos/on_demand_feature_view_repo.py index ac572d5747..5df0ee1c6f 100644 --- a/sdk/python/tests/example_repos/on_demand_feature_view_repo.py +++ b/sdk/python/tests/example_repos/on_demand_feature_view_repo.py @@ -15,7 +15,10 @@ owner="test2@gmail.com", ) -driver = Entity(name="driver_id", description="driver id",) +driver = Entity( + name="driver_id", + description="driver id", +) driver_daily_features_view = FeatureView( name="driver_daily_features", diff --git a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index bd6f9811e8..7866465b91 100644 --- a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -31,7 +31,10 @@ def update_infra( pass def teardown_infra( - self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], + self, + project: str, + tables: Sequence[FeatureView], + entities: Sequence[Entity], ): pass diff --git a/sdk/python/tests/integration/e2e/test_go_feature_server.py b/sdk/python/tests/integration/e2e/test_go_feature_server.py index 465fa41769..1430d9cdf9 100644 --- a/sdk/python/tests/integration/e2e/test_go_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_go_feature_server.py @@ -65,7 +65,9 @@ def server_port(environment, server_type: str): fs = environment.feature_store embedded = EmbeddedOnlineFeatureServer( - repo_path=str(fs.repo_path.absolute()), repo_config=fs.config, feature_store=fs, + repo_path=str(fs.repo_path.absolute()), + repo_config=fs.config, + feature_store=fs, ) port = free_port() if server_type == "grpc": diff --git 
a/sdk/python/tests/integration/e2e/test_python_feature_server.py b/sdk/python/tests/integration/e2e/test_python_feature_server.py index ac098d3f29..97b9693391 100644 --- a/sdk/python/tests/integration/e2e/test_python_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_python_feature_server.py @@ -70,7 +70,10 @@ def test_push(python_fs_client): }, } ) - response = python_fs_client.post("/push", data=json_data,) + response = python_fs_client.post( + "/push", + data=json_data, + ) # Check new pushed temperature is fetched assert response.status_code == 200 diff --git a/sdk/python/tests/integration/e2e/test_validation.py b/sdk/python/tests/integration/e2e/test_validation.py index 0d65640dca..b8908663b3 100644 --- a/sdk/python/tests/integration/e2e/test_validation.py +++ b/sdk/python/tests/integration/e2e/test_validation.py @@ -118,9 +118,13 @@ def test_historical_retrieval_with_validation(environment, universal_data_source columns=["order_id", "origin_id", "destination_id"] ) reference_job = store.get_historical_features( - entity_df=entity_df, features=_features, + entity_df=entity_df, + features=_features, + ) + job = store.get_historical_features( + entity_df=entity_df, + features=_features, ) - job = store.get_historical_features(entity_df=entity_df, features=_features,) # Save dataset using reference job and retrieve it store.create_saved_dataset( @@ -149,7 +153,8 @@ def test_historical_retrieval_fails_on_validation(environment, universal_data_so ) reference_job = store.get_historical_features( - entity_df=entity_df, features=_features, + entity_df=entity_df, + features=_features, ) store.create_saved_dataset( @@ -158,7 +163,10 @@ def test_historical_retrieval_fails_on_validation(environment, universal_data_so storage=environment.data_source_creator.create_saved_dataset_destination(), ) - job = store.get_historical_features(entity_df=entity_df, features=_features,) + job = store.get_historical_features( + entity_df=entity_df, + features=_features, + ) 
with pytest.raises(ValidationFailed) as exc_info: job.to_df( diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 6342544417..672a5bdcfa 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -307,11 +307,12 @@ def values(self): def construct_universal_feature_views( - data_sources: UniversalDataSources, with_odfv: bool = True, + data_sources: UniversalDataSources, + with_odfv: bool = True, ) -> UniversalFeatureViews: driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver) - driver_hourly_stats_base_feature_view = create_driver_hourly_stats_batch_feature_view( - data_sources.driver + driver_hourly_stats_base_feature_view = ( + create_driver_hourly_stats_batch_feature_view(data_sources.driver) ) return UniversalFeatureViews( customer=create_customer_daily_profile_feature_view(data_sources.customer), @@ -401,12 +402,13 @@ def construct_test_environment( if ( test_repo_config.python_feature_server and test_repo_config.provider == "aws" ) or test_repo_config.registry_location == RegistryLocation.S3: - registry: Union[str, RegistryConfig] = ( - f"s3://feast-integration-tests/registries/{project}/registry.db" - ) + registry: Union[ + str, RegistryConfig + ] = f"s3://feast-integration-tests/registries/{project}/registry.db" else: registry = RegistryConfig( - path=str(Path(repo_dir_name) / "registry.db"), cache_ttl_seconds=1, + path=str(Path(repo_dir_name) / "registry.db"), + cache_ttl_seconds=1, ) config = RepoConfig( diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index ccc1544bb8..7b8e5e80e6 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ 
b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -124,7 +124,9 @@ def _upload_parquet_file(self, df, file_name, minio_endpoint): if not client.bucket_exists(self.bucket): client.make_bucket(self.bucket) client.fput_object( - self.bucket, file_name, self.f.name, + self.bucket, + file_name, + self.f.name, ) def create_data_source( diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py index 3fee0b7001..b6e9aa8fc0 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -130,7 +130,8 @@ def similarity_feature_view( def create_conv_rate_request_source(): return RequestSource( - name="conv_rate_input", schema=[Field(name="val_to_add", dtype=Int32)], + name="conv_rate_input", + schema=[Field(name="val_to_add", dtype=Int32)], ) @@ -296,7 +297,8 @@ def create_field_mapping_feature_view(source): def create_pushable_feature_view(batch_source: DataSource): push_source = PushSource( - name="location_stats_push_source", batch_source=batch_source, + name="location_stats_push_source", + batch_source=batch_source, ) return StreamFeatureView( name="pushable_location_stats", diff --git a/sdk/python/tests/integration/materialization/test_lambda.py b/sdk/python/tests/integration/materialization/test_lambda.py index 66cd2c5eb9..4a259fd365 100644 --- a/sdk/python/tests/integration/materialization/test_lambda.py +++ b/sdk/python/tests/integration/materialization/test_lambda.py @@ -38,11 +38,17 @@ def test_lambda_materialization(): df = create_basic_driver_dataset() ds = lambda_environment.data_source_creator.create_data_source( - df, lambda_environment.feature_store.project, field_mapping={"ts_1": "ts"}, + df, + lambda_environment.feature_store.project, + field_mapping={"ts_1": "ts"}, ) fs = lambda_environment.feature_store - driver = 
Entity(name="driver_id", join_key="driver_id", value_type=ValueType.INT64,) + driver = Entity( + name="driver_id", + join_key="driver_id", + value_type=ValueType.INT64, + ) driver_stats_fv = FeatureView( name="driver_hourly_stats", diff --git a/sdk/python/tests/integration/offline_store/test_feature_logging.py b/sdk/python/tests/integration/offline_store/test_feature_logging.py index a6f8e56de7..5d74ee284c 100644 --- a/sdk/python/tests/integration/offline_store/test_feature_logging.py +++ b/sdk/python/tests/integration/offline_store/test_feature_logging.py @@ -65,12 +65,14 @@ def test_feature_service_logging(environment, universal_data_sources, pass_as_pa with to_logs_dataset(first_batch, pass_as_path) as logs: store.write_logged_features( - source=feature_service, logs=logs, + source=feature_service, + logs=logs, ) with to_logs_dataset(second_batch, pass_as_path) as logs: store.write_logged_features( - source=feature_service, logs=logs, + source=feature_service, + logs=logs, ) expected_columns = list(set(logs_df.columns) - {LOG_DATE_FIELD}) diff --git a/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py b/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py index dfe14d73f9..645e0f7331 100644 --- a/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py +++ b/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py @@ -17,7 +17,9 @@ @pytest.mark.skip( reason="No way to run this test today. 
Credentials conflict with real AWS credentials in CI" ) -def test_registration_and_retrieval_from_custom_s3_endpoint(universal_data_sources,): +def test_registration_and_retrieval_from_custom_s3_endpoint( + universal_data_sources, +): config = IntegrationTestRepoConfig( offline_store_creator="tests.integration.feature_repos.universal.data_sources.file.S3FileDataSourceCreator" ) diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index abaf1622c0..32e6e52d18 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -209,7 +209,10 @@ def get_expected_training_df( (f"global_stats__{k}" if full_feature_names else k): global_record.get( k, None ) - for k in ("num_rides", "avg_ride_length",) + for k in ( + "num_rides", + "avg_ride_length", + ) } ) @@ -882,5 +885,7 @@ def assert_frame_equal(expected_df, actual_df, keys): ) pd_assert_frame_equal( - expected_df, actual_df, check_dtype=False, + expected_df, + actual_df, + check_dtype=False, ) diff --git a/sdk/python/tests/integration/online_store/test_e2e_local.py b/sdk/python/tests/integration/online_store/test_e2e_local.py index c5b66e7ddc..46198a4062 100644 --- a/sdk/python/tests/integration/online_store/test_e2e_local.py +++ b/sdk/python/tests/integration/online_store/test_e2e_local.py @@ -135,7 +135,8 @@ def _test_materialize_and_online_retrieval( # Test `feast materialize-incremental` and online retrieval. 
r = runner.run( - ["materialize-incremental", end_date.isoformat()], cwd=Path(store.repo_path), + ["materialize-incremental", end_date.isoformat()], + cwd=Path(store.repo_path), ) assert r.returncode == 0, f"stdout: {r.stdout}\n stderr: {r.stderr}" diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 228069c13c..6521c9ed2f 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -799,7 +799,10 @@ def get_latest_feature_values_from_dataframes( entity_row, customer_df, "customer_id", "customer_id" ) latest_location_row = get_latest_row( - entity_row, location_df, "location_id", "location_id", + entity_row, + location_df, + "location_id", + "location_id", ) # Since the event timestamp columns may contain timestamps of different timezones, diff --git a/sdk/python/tests/integration/registration/test_cli.py b/sdk/python/tests/integration/registration/test_cli.py index ecc17fc06c..15e5cf09ee 100644 --- a/sdk/python/tests/integration/registration/test_cli.py +++ b/sdk/python/tests/integration/registration/test_cli.py @@ -86,7 +86,8 @@ def test_universal_cli(environment: Environment): assertpy.assert_that(result.returncode).is_equal_to(0) assertpy.assert_that(fs.list_feature_views()).is_length(4) result = runner.run( - ["data-sources", "describe", "customer_profile_source"], cwd=repo_path, + ["data-sources", "describe", "customer_profile_source"], + cwd=repo_path, ) assertpy.assert_that(result.returncode).is_equal_to(0) assertpy.assert_that(fs.list_data_sources()).is_length(4) diff --git a/sdk/python/tests/integration/registration/test_feature_store.py b/sdk/python/tests/integration/registration/test_feature_store.py index 88a4b9f249..6243e27fca 100644 --- a/sdk/python/tests/integration/registration/test_feature_store.py +++ 
b/sdk/python/tests/integration/registration/test_feature_store.py @@ -87,11 +87,14 @@ def feature_store_with_s3_registry(): @pytest.mark.parametrize( - "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], ) def test_apply_entity_success(test_feature_store): entity = Entity( - name="driver_car_id", description="Car driver id", tags={"team": "matchmaking"}, + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, ) # Register Entity @@ -121,7 +124,9 @@ def test_apply_entity_success(test_feature_store): ) def test_apply_entity_integration(test_feature_store): entity = Entity( - name="driver_car_id", description="Car driver id", tags={"team": "matchmaking"}, + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, ) # Register Entity @@ -150,7 +155,8 @@ def test_apply_entity_integration(test_feature_store): @pytest.mark.parametrize( - "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], ) def test_apply_feature_view_success(test_feature_store): # Create Feature Views @@ -204,7 +210,8 @@ def test_apply_feature_view_success(test_feature_store): @pytest.mark.integration @pytest.mark.parametrize( - "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], ) @pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) def test_feature_view_inference_success(test_feature_store, dataframe_source): @@ -345,7 +352,8 @@ def test_apply_feature_view_integration(test_feature_store): @pytest.mark.parametrize( - "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], ) def 
test_apply_object_and_read(test_feature_store): assert isinstance(test_feature_store, FeatureStore) @@ -418,7 +426,8 @@ def test_apply_remote_repo(): @pytest.mark.parametrize( - "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], ) @pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) def test_reapply_feature_view_success(test_feature_store, dataframe_source): diff --git a/sdk/python/tests/integration/registration/test_inference.py b/sdk/python/tests/integration/registration/test_inference.py index 935aa2d1a6..f660c46b15 100644 --- a/sdk/python/tests/integration/registration/test_inference.py +++ b/sdk/python/tests/integration/registration/test_inference.py @@ -113,7 +113,8 @@ def test_update_data_sources_with_inferred_event_timestamp_col(universal_data_so data_source.event_timestamp_column = None update_data_sources_with_inferred_event_timestamp_col( - data_sources_copy.values(), RepoConfig(provider="local", project="test"), + data_sources_copy.values(), + RepoConfig(provider="local", project="test"), ) actual_event_timestamp_cols = [ source.timestamp_field for source in data_sources_copy.values() @@ -127,7 +128,8 @@ def test_update_data_sources_with_inferred_event_timestamp_col(universal_data_so def test_on_demand_features_type_inference(): # Create Feature Views date_request = RequestSource( - name="date_request", schema=[Field(name="some_date", dtype=UnixTimestamp)], + name="date_request", + schema=[Field(name="some_date", dtype=UnixTimestamp)], ) @on_demand_feature_view( @@ -193,7 +195,10 @@ def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: ) def test_datasource_inference(request_source_schema): # Create Feature Views - date_request = RequestSource(name="date_request", schema=request_source_schema,) + date_request = RequestSource( + name="date_request", + schema=request_source_schema, + ) 
@on_demand_feature_view( # Note: we deliberately use positional arguments here to test that they work correctly, @@ -405,10 +410,14 @@ def test_update_feature_services_with_inferred_features(simple_dataset_1): with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: entity1 = Entity(name="test1", join_keys=["id_join_key"]) feature_view_1 = FeatureView( - name="test1", entities=[entity1], source=file_source, + name="test1", + entities=[entity1], + source=file_source, ) feature_view_2 = FeatureView( - name="test2", entities=[entity1], source=file_source, + name="test2", + entities=[entity1], + source=file_source, ) feature_service = FeatureService( diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index 27bbbbd2bb..e6309779f9 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -71,11 +71,14 @@ def s3_registry() -> Registry: @pytest.mark.parametrize( - "test_registry", [lazy_fixture("local_registry")], + "test_registry", + [lazy_fixture("local_registry")], ) def test_apply_entity_success(test_registry): entity = Entity( - name="driver_car_id", description="Car driver id", tags={"team": "matchmaking"}, + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, ) project = "project" @@ -115,11 +118,14 @@ def test_apply_entity_success(test_registry): @pytest.mark.integration @pytest.mark.parametrize( - "test_registry", [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], + "test_registry", + [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], ) def test_apply_entity_integration(test_registry): entity = Entity( - name="driver_car_id", description="Car driver id", tags={"team": "matchmaking"}, + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, ) project = "project" @@ -154,7 +160,8 @@ def 
test_apply_entity_integration(test_registry): @pytest.mark.parametrize( - "test_registry", [lazy_fixture("local_registry")], + "test_registry", + [lazy_fixture("local_registry")], ) def test_apply_feature_view_success(test_registry): # Create Feature Views @@ -229,7 +236,8 @@ def test_apply_feature_view_success(test_registry): @pytest.mark.parametrize( - "test_registry", [lazy_fixture("local_registry")], + "test_registry", + [lazy_fixture("local_registry")], ) def test_apply_on_demand_feature_view_success(test_registry): # Create Feature Views @@ -303,7 +311,8 @@ def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: @pytest.mark.parametrize( - "test_registry", [lazy_fixture("local_registry")], + "test_registry", + [lazy_fixture("local_registry")], ) def test_apply_stream_feature_view_success(test_registry): # Create Feature Views @@ -332,10 +341,14 @@ def simple_udf(x: int): description="desc", aggregations=[ Aggregation( - column="dummy_field", function="max", time_window=timedelta(days=1), + column="dummy_field", + function="max", + time_window=timedelta(days=1), ), Aggregation( - column="dummy_field2", function="count", time_window=timedelta(days=24), + column="dummy_field2", + function="count", + time_window=timedelta(days=24), ), ], timestamp_field="event_timestamp", @@ -368,7 +381,8 @@ def simple_udf(x: int): @pytest.mark.parametrize( - "test_registry", [lazy_fixture("local_registry")], + "test_registry", + [lazy_fixture("local_registry")], ) # TODO(kevjumba): remove this in feast 0.24 when deprecating @pytest.mark.parametrize( @@ -384,7 +398,10 @@ def test_modify_feature_views_success(test_registry, request_source_schema): created_timestamp_column="timestamp", ) - request_source = RequestSource(name="request_source", schema=request_source_schema,) + request_source = RequestSource( + name="request_source", + schema=request_source_schema, + ) entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) @@ -493,7 +510,8 @@ def odfv1(feature_df: 
pd.DataFrame) -> pd.DataFrame: @pytest.mark.integration @pytest.mark.parametrize( - "test_registry", [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], + "test_registry", + [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], ) def test_apply_feature_view_integration(test_registry): # Create Feature Views @@ -569,14 +587,16 @@ def test_apply_feature_view_integration(test_registry): @pytest.mark.integration @pytest.mark.parametrize( - "test_registry", [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], + "test_registry", + [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], ) def test_apply_data_source_integration(test_registry: Registry): run_test_data_source_apply(test_registry) @pytest.mark.parametrize( - "test_registry", [lazy_fixture("local_registry")], + "test_registry", + [lazy_fixture("local_registry")], ) def test_apply_data_source(test_registry: Registry): run_test_data_source_apply(test_registry) @@ -649,7 +669,9 @@ def test_commit(): test_registry = Registry(registry_config, None) entity = Entity( - name="driver_car_id", description="Car driver id", tags={"team": "matchmaking"}, + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, ) project = "project" diff --git a/sdk/python/tests/integration/registration/test_sql_registry.py b/sdk/python/tests/integration/registration/test_sql_registry.py index 56aff8c6d1..23a19f664a 100644 --- a/sdk/python/tests/integration/registration/test_sql_registry.py +++ b/sdk/python/tests/integration/registration/test_sql_registry.py @@ -56,7 +56,10 @@ def pg_registry(): log_string_to_wait_for = "database system is ready to accept connections" waited = wait_for_logs( - container=container, predicate=log_string_to_wait_for, timeout=30, interval=10, + container=container, + predicate=log_string_to_wait_for, + timeout=30, + interval=10, ) logger.info("Waited for %s seconds until postgres container was up", waited) container_port = 
container.get_exposed_port(5432) @@ -86,7 +89,10 @@ def mysql_registry(): log_string_to_wait_for = "/usr/sbin/mysqld: ready for connections. Version: '8.0.29' socket: '/var/run/mysqld/mysqld.sock' port: 3306" waited = wait_for_logs( - container=container, predicate=log_string_to_wait_for, timeout=60, interval=10, + container=container, + predicate=log_string_to_wait_for, + timeout=60, + interval=10, ) logger.info("Waited for %s seconds until mysql container was up", waited) container_port = container.get_exposed_port(3306) @@ -106,11 +112,14 @@ def mysql_registry(): reason="does not run on mac github actions", ) @pytest.mark.parametrize( - "sql_registry", [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], + "sql_registry", + [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], ) def test_apply_entity_success(sql_registry): entity = Entity( - name="driver_car_id", description="Car driver id", tags={"team": "matchmaking"}, + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, ) project = "project" @@ -163,7 +172,8 @@ def assert_project_uuid(project, project_uuid, sql_registry): reason="does not run on mac github actions", ) @pytest.mark.parametrize( - "sql_registry", [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], + "sql_registry", + [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], ) def test_apply_feature_view_success(sql_registry): # Create Feature Views @@ -238,7 +248,8 @@ def test_apply_feature_view_success(sql_registry): reason="does not run on mac github actions", ) @pytest.mark.parametrize( - "sql_registry", [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], + "sql_registry", + [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], ) def test_apply_on_demand_feature_view_success(sql_registry): # Create Feature Views @@ -321,7 +332,8 @@ def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: reason="does not run on mac github actions", ) 
@pytest.mark.parametrize( - "sql_registry", [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], + "sql_registry", + [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], ) @pytest.mark.parametrize( "request_source_schema", @@ -336,7 +348,10 @@ def test_modify_feature_views_success(sql_registry, request_source_schema): created_timestamp_column="timestamp", ) - request_source = RequestSource(name="request_source", schema=request_source_schema,) + request_source = RequestSource( + name="request_source", + schema=request_source_schema, + ) entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) @@ -445,7 +460,8 @@ def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: ) @pytest.mark.integration @pytest.mark.parametrize( - "sql_registry", [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], + "sql_registry", + [lazy_fixture("mysql_registry"), lazy_fixture("pg_registry")], ) def test_apply_data_source(sql_registry): # Create Feature Views diff --git a/sdk/python/tests/integration/registration/test_stream_feature_view_apply.py b/sdk/python/tests/integration/registration/test_stream_feature_view_apply.py index 8e2af031c5..22a8d8a699 100644 --- a/sdk/python/tests/integration/registration/test_stream_feature_view_apply.py +++ b/sdk/python/tests/integration/registration/test_stream_feature_view_apply.py @@ -45,7 +45,9 @@ def test_apply_stream_feature_view(simple_dataset_1) -> None: description="desc", aggregations=[ Aggregation( - column="dummy_field", function="max", time_window=timedelta(days=1), + column="dummy_field", + function="max", + time_window=timedelta(days=1), ), Aggregation( column="dummy_field2", @@ -68,7 +70,8 @@ def simple_sfv(df): assert stream_feature_views[0] == simple_sfv features = fs.get_online_features( - features=["simple_sfv:dummy_field"], entity_rows=[{"test_key": 1001}], + features=["simple_sfv:dummy_field"], + entity_rows=[{"test_key": 1001}], ).to_dict(include_event_timestamps=True) assert "test_key" in features @@ 
-109,7 +112,9 @@ def test_stream_feature_view_udf(simple_dataset_1) -> None: description="desc", aggregations=[ Aggregation( - column="dummy_field", function="max", time_window=timedelta(days=1), + column="dummy_field", + function="max", + time_window=timedelta(days=1), ), Aggregation( column="dummy_field2", diff --git a/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py index b7a9a571af..ce960b9c35 100644 --- a/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py +++ b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py @@ -31,7 +31,8 @@ def test_infer_odfv_features(environment, universal_data_sources, infer_features ) request_source = create_conv_rate_request_source() driver_odfv = conv_rate_plus_100_feature_view( - [driver_hourly_stats, request_source], infer_features=infer_features, + [driver_hourly_stats, request_source], + infer_features=infer_features, ) feast_objects = [driver_hourly_stats, driver_odfv, driver(), customer()] @@ -83,7 +84,8 @@ def test_infer_odfv_features_with_error(environment, universal_data_sources): ) request_source = create_conv_rate_request_source() driver_odfv = conv_rate_plus_100_feature_view( - [driver_hourly_stats, request_source], features=features, + [driver_hourly_stats, request_source], + features=features, ) feast_objects = [driver_hourly_stats, driver_odfv, driver(), customer()] diff --git a/sdk/python/tests/integration/registration/test_universal_types.py b/sdk/python/tests/integration/registration/test_universal_types.py index 938773b7b7..ad29531e11 100644 --- a/sdk/python/tests/integration/registration/test_universal_types.py +++ b/sdk/python/tests/integration/registration/test_universal_types.py @@ -64,7 +64,8 @@ class TypeTestConfig: @pytest.fixture( - params=OFFLINE_TYPE_TEST_CONFIGS, ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], + 
params=OFFLINE_TYPE_TEST_CONFIGS, + ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], ) def offline_types_test_fixtures(request, environment): config: TypeTestConfig = request.param @@ -78,7 +79,8 @@ def offline_types_test_fixtures(request, environment): @pytest.fixture( - params=ONLINE_TYPE_TEST_CONFIGS, ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], + params=ONLINE_TYPE_TEST_CONFIGS, + ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], ) def online_types_test_fixtures(request, environment): return get_fixtures(request, environment) @@ -87,15 +89,22 @@ def online_types_test_fixtures(request, environment): def get_fixtures(request, environment): config: TypeTestConfig = request.param # Lower case needed because Redshift lower-cases all table names - destination_name = f"feature_type_{config.feature_dtype}{config.feature_is_list}".replace( - ".", "" - ).lower() + destination_name = ( + f"feature_type_{config.feature_dtype}{config.feature_is_list}".replace( + ".", "" + ).lower() + ) config = request.param df = create_basic_driver_dataset( - Int64, config.feature_dtype, config.feature_is_list, config.has_empty_list, + Int64, + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, ) data_source = environment.data_source_creator.create_data_source( - df, destination_name=destination_name, field_mapping={"ts_1": "ts"}, + df, + destination_name=destination_name, + field_mapping={"ts_1": "ts"}, ) fv = create_feature_view( destination_name, @@ -115,7 +124,10 @@ def test_entity_inference_types_match(environment, entity_type): fs = environment.feature_store # Don't specify value type in entity to force inference - df = create_basic_driver_dataset(entity_type, feature_dtype="int32",) + df = create_basic_driver_dataset( + entity_type, + feature_dtype="int32", + ) data_source = environment.data_source_creator.create_data_source( df, destination_name=f"entity_type_{entity_type.name.lower()}", @@ -186,7 +198,8 @@ def 
test_feature_get_historical_features_types_match( features = [f"{fv.name}:value"] historical_features = fs.get_historical_features( - entity_df=entity_df, features=features, + entity_df=entity_df, + features=features, ) # Note: Pandas doesn't play well with nan values in ints. BQ will also coerce to floats if there are NaNs historical_features_df = historical_features.to_df() @@ -235,7 +248,8 @@ def test_feature_get_online_features_types_match( ) online_features = fs.get_online_features( - features=features, entity_rows=[{"driver_id": 1}], + features=features, + entity_rows=[{"driver_id": 1}], ).to_dict() feature_list_dtype_to_expected_online_response_value_type = { @@ -340,7 +354,10 @@ def assert_feature_list_types( bool, np.bool_, ), # Can be `np.bool_` if from `np.array` rather that `list` - "datetime": (np.datetime64, datetime,), # datetime.datetime + "datetime": ( + np.datetime64, + datetime, + ), # datetime.datetime } expected_dtype = feature_list_dtype_to_expected_historical_feature_list_dtype[ feature_dtype diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py index ae10c834c8..d12fc717f0 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -11,10 +11,16 @@ def test_tag_objects_for_keep_delete_update_add(simple_dataset_1): with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: entity = Entity(name="id", join_keys=["id"]) to_delete = FeatureView( - name="to_delete", entities=[entity], batch_source=file_source, ttl=None, + name="to_delete", + entities=[entity], + batch_source=file_source, + ttl=None, ) unchanged_fv = FeatureView( - name="fv1", entities=[entity], batch_source=file_source, ttl=None, + name="fv1", + entities=[entity], + batch_source=file_source, + ttl=None, ) pre_changed = FeatureView( name="fv2", @@ -31,7 +37,10 @@ def test_tag_objects_for_keep_delete_update_add(simple_dataset_1): 
tags={"when": "after"}, ) to_add = FeatureView( - name="to_add", entities=[entity], batch_source=file_source, ttl=None, + name="to_add", + entities=[entity], + batch_source=file_source, + ttl=None, ) keep, delete, update, add = tag_objects_for_keep_delete_update_add( diff --git a/sdk/python/tests/unit/test_data_sources.py b/sdk/python/tests/unit/test_data_sources.py index 0208a71503..61891ccf1a 100644 --- a/sdk/python/tests/unit/test_data_sources.py +++ b/sdk/python/tests/unit/test_data_sources.py @@ -20,7 +20,8 @@ def test_push_with_batch(): push_source = PushSource( - name="test", batch_source=BigQuerySource(table="test.test"), + name="test", + batch_source=BigQuerySource(table="test.test"), ) push_source_proto = push_source.to_proto() assert push_source_proto.HasField("batch_source") @@ -48,7 +49,11 @@ def test_request_source_primitive_type_to_proto(): Field(name="f2", dtype=Bool), ] request_source = RequestSource( - name="source", schema=schema, description="desc", tags={}, owner="feast", + name="source", + schema=schema, + description="desc", + tags={}, + owner="feast", ) request_proto = request_source.to_proto() deserialized_request_source = RequestSource.from_proto(request_proto) @@ -57,13 +62,16 @@ def test_request_source_primitive_type_to_proto(): def test_hash(): push_source_1 = PushSource( - name="test", batch_source=BigQuerySource(table="test.test"), + name="test", + batch_source=BigQuerySource(table="test.test"), ) push_source_2 = PushSource( - name="test", batch_source=BigQuerySource(table="test.test"), + name="test", + batch_source=BigQuerySource(table="test.test"), ) push_source_3 = PushSource( - name="test", batch_source=BigQuerySource(table="test.test2"), + name="test", + batch_source=BigQuerySource(table="test.test2"), ) push_source_4 = PushSource( name="test", diff --git a/sdk/python/tests/unit/test_entity.py b/sdk/python/tests/unit/test_entity.py index 04a857ddef..66ed02a71c 100644 --- a/sdk/python/tests/unit/test_entity.py +++ 
b/sdk/python/tests/unit/test_entity.py @@ -27,7 +27,9 @@ def test_join_key_default(): def test_entity_class_contains_tags(): with pytest.deprecated_call(): entity = Entity( - "my-entity", description="My entity", tags={"key1": "val1", "key2": "val2"}, + "my-entity", + description="My entity", + tags={"key1": "val1", "key2": "val2"}, ) assert "key1" in entity.tags.keys() and entity.tags["key1"] == "val1" assert "key2" in entity.tags.keys() and entity.tags["key2"] == "val2" diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py index d6be8e0341..7b608b621d 100644 --- a/sdk/python/tests/unit/test_feature_views.py +++ b/sdk/python/tests/unit/test_feature_views.py @@ -117,7 +117,9 @@ def test_stream_feature_view_serialization(): description="desc", aggregations=[ Aggregation( - column="dummy_field", function="max", time_window=timedelta(days=1), + column="dummy_field", + function="max", + time_window=timedelta(days=1), ) ], timestamp_field="event_timestamp", @@ -153,7 +155,9 @@ def test_stream_feature_view_udfs(): description="desc", aggregations=[ Aggregation( - column="dummy_field", function="max", time_window=timedelta(days=1), + column="dummy_field", + function="max", + time_window=timedelta(days=1), ) ], timestamp_field="event_timestamp", diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index 33435b8557..5a0f5c98d8 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -107,7 +107,8 @@ def test_hash(): def test_inputs_parameter_deprecation_in_odfv(): date_request = RequestSource( - name="date_request", schema=[Field(name="some_date", dtype=UnixTimestamp)], + name="date_request", + schema=[Field(name="some_date", dtype=UnixTimestamp)], ) with pytest.warns(DeprecationWarning): diff --git a/sdk/python/tests/unit/test_usage.py b/sdk/python/tests/unit/test_usage.py index 
13988d3264..ca84247430 100644 --- a/sdk/python/tests/unit/test_usage.py +++ b/sdk/python/tests/unit/test_usage.py @@ -234,4 +234,4 @@ def call_length_ms(call): return ( datetime.datetime.fromisoformat(call["end"]) - datetime.datetime.fromisoformat(call["start"]) - ).total_seconds() * 10 ** 3 + ).total_seconds() * 10**3 diff --git a/sdk/python/tests/utils/data_source_utils.py b/sdk/python/tests/utils/data_source_utils.py index d5f45964ca..3f10371734 100644 --- a/sdk/python/tests/utils/data_source_utils.py +++ b/sdk/python/tests/utils/data_source_utils.py @@ -16,7 +16,9 @@ def prep_file_source(df, timestamp_field=None) -> Iterator[FileSource]: f.close() df.to_parquet(f.name) file_source = FileSource( - file_format=ParquetFormat(), path=f.name, timestamp_field=timestamp_field, + file_format=ParquetFormat(), + path=f.name, + timestamp_field=timestamp_field, ) yield file_source @@ -38,7 +40,10 @@ def simple_bq_source_using_table_arg(df, timestamp_field=None) -> BigQuerySource job = client.load_table_from_dataframe(df, table) job.result() - return BigQuerySource(table=table, timestamp_field=timestamp_field,) + return BigQuerySource( + table=table, + timestamp_field=timestamp_field, + ) def simple_bq_source_using_query_arg(df, timestamp_field=None) -> BigQuerySource: diff --git a/setup.py b/setup.py index bbcd6ad3ec..c6d4aa30b7 100644 --- a/setup.py +++ b/setup.py @@ -25,18 +25,18 @@ from pathlib import Path from subprocess import CalledProcessError -from setuptools import find_packages, Extension +from setuptools import Extension, find_packages try: from setuptools import setup - from setuptools.command.build_py import build_py from setuptools.command.build_ext import build_ext as _build_ext + from setuptools.command.build_py import build_py from setuptools.command.develop import develop from setuptools.command.install import install except ImportError: - from distutils.command.build_py import build_py from distutils.command.build_ext import build_ext as _build_ext + 
from distutils.command.build_py import build_py from distutils.core import setup NAME = "feast" @@ -46,7 +46,7 @@ REQUIRES_PYTHON = ">=3.7.0" REQUIRED = [ - "click>=7.0.0,<8.0.2", + "click>=7.0.0,<9.0.0", "colorama>=0.3.9,<1", "dill==0.3.*", "fastavro>=1.1.0,<2", @@ -57,20 +57,20 @@ "Jinja2>=2,<4", "jsonschema", "mmh3", - "numpy>=1.22,<2", - "pandas>=1,<2", + "numpy>=1.22,<3", + "pandas>=1.4.3,<2", "pandavro==1.5.*", "protobuf>3.20,<4", - "proto-plus==1.20.*", - "pyarrow>=4,<7", + "proto-plus>=1.20.0,<2", + "pyarrow>=4,<9", "pydantic>=1,<2", - "pygments==2.12.0", + "pygments>=2.12.0,<3", "PyYAML>=5.4.*,<7", "SQLAlchemy[mypy]>1,<2", - "tabulate==0.8.*", + "tabulate>=0.8.0,<1", "tenacity>=7,<9", - "toml==0.10.*", - "tqdm==4.*", + "toml>=0.10.0,<1", + "tqdm>=4,<5", "typeguard", "fastapi>=0.68.0,<1", "uvicorn[standard]>=0.14.0,<1", @@ -126,9 +126,9 @@ CI_REQUIRED = ( [ "build", - "cryptography==35.0", + "cryptography>=35.0,<36", "flake8", - "black==19.10b0", + "black>=22.6.0,<23", "isort>=5,<6", "grpcio-tools==1.47.0", "grpcio-testing==1.47.0", @@ -427,12 +427,18 @@ def build_extension(self, ext: Extension): print(f"CWD: {os.getcwd()}") destination = os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name))) - subprocess.check_call(["go", "install", "golang.org/x/tools/cmd/goimports"], - env={"PATH": bin_path, **go_env}) - subprocess.check_call(["go", "get", "github.com/go-python/gopy@v0.4.4"], - env={"PATH": bin_path, **go_env}) - subprocess.check_call(["go", "install", "github.com/go-python/gopy"], - env={"PATH": bin_path, **go_env}) + subprocess.check_call( + ["go", "install", "golang.org/x/tools/cmd/goimports"], + env={"PATH": bin_path, **go_env}, + ) + subprocess.check_call( + ["go", "get", "github.com/go-python/gopy@v0.4.4"], + env={"PATH": bin_path, **go_env}, + ) + subprocess.check_call( + ["go", "install", "github.com/go-python/gopy"], + env={"PATH": bin_path, **go_env}, + ) subprocess.check_call( [ "gopy", @@ -442,12 +448,16 @@ def 
build_extension(self, ext: Extension): "-vm", sys.executable, "--build-tags", - 'cgo,ccalloc', + "cgo,ccalloc", "--dynamic-link=True", "-no-make", *ext.sources, ], - env={"PATH": bin_path, "CGO_LDFLAGS_ALLOW": ".*", **go_env,}, + env={ + "PATH": bin_path, + "CGO_LDFLAGS_ALLOW": ".*", + **go_env, + }, ) def copy_extensions_to_source(self): @@ -535,4 +545,4 @@ def copy_extensions_to_source(self): ["github.com/feast-dev/feast/go/embedded"], ) ], -) \ No newline at end of file +) From d0d27a35a0d63a139970cb17542764ff2aaf6aaf Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Tue, 19 Jul 2022 17:32:27 -0700 Subject: [PATCH 43/73] fix: Version entity serialization mechanism and fix issue with int64 vals (#2944) * fix: version the entity serialization mechanism to fix issue with int64 vals Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * Add a test Signed-off-by: Achal Shah * Add a test Signed-off-by: Achal Shah * fix test Signed-off-by: Achal Shah * fix test Signed-off-by: Achal Shah * fix test Signed-off-by: Achal Shah * simplify Signed-off-by: Achal Shah * simplify Signed-off-by: Achal Shah * feature_store.yaml Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * remove protos Signed-off-by: Achal Shah * fix tests Signed-off-by: Achal Shah * update feature_store.yaml templates Signed-off-by: Achal Shah * fix java Signed-off-by: Achal Shah * fix java test Signed-off-by: Achal Shah * docs Signed-off-by: Achal Shah * docs Signed-off-by: Achal Shah * docs Signed-off-by: Achal Shah --- go/internal/feast/onlinestore/onlinestore.go | 2 +- .../feast/onlinestore/redisonlinestore.go | 38 ++-- .../onlinestore/redisonlinestore_test.go | 25 ++- .../feast/onlinestore/sqliteonlinestore.go | 15 +- go/internal/feast/registry/repoconfig.go | 2 + .../serving/config/ApplicationProperties.java | 177 +++++++++--------- .../config/ServingServiceConfigV2.java | 6 +- .../retriever/EntityKeySerializerV2.java | 23 ++- sdk/python/feast/infra/key_encoding_utils.py | 20 
+- .../materialization/lambda/lambda_engine.py | 6 +- .../contrib/hbase_online_store/hbase.py | 11 +- .../infra/online_stores/contrib/postgres.py | 12 +- .../feast/infra/online_stores/datastore.py | 13 +- .../feast/infra/online_stores/dynamodb.py | 16 +- .../feast/infra/online_stores/helpers.py | 23 ++- sdk/python/feast/infra/online_stores/redis.py | 12 +- .../feast/infra/online_stores/sqlite.py | 18 +- sdk/python/feast/infra/utils/hbase_utils.py | 9 +- sdk/python/feast/on_demand_feature_view.py | 1 + sdk/python/feast/registry.py | 1 + sdk/python/feast/repo_config.py | 25 ++- .../feast/templates/aws/feature_store.yaml | 1 + .../feast/templates/gcp/feature_store.yaml | 3 +- .../feast/templates/hbase/feature_store.yaml | 1 + .../feast/templates/local/feature_store.yaml | 3 +- .../templates/minimal/feature_store.yaml | 3 +- .../templates/postgres/feature_store.yaml | 1 + .../templates/snowflake/feature_store.yaml | 1 + .../feast/templates/spark/feature_store.yaml | 1 + .../online_store/test_e2e_local.py | 2 +- .../test_dynamodb_online_store.py | 2 +- .../unit/infra/test_key_encoding_utils.py | 30 +++ .../tests/unit/test_serialization_version.py | 17 ++ sdk/python/tests/utils/cli_utils.py | 10 +- sdk/python/tests/utils/online_store_utils.py | 2 +- 35 files changed, 373 insertions(+), 159 deletions(-) create mode 100644 sdk/python/tests/unit/infra/test_key_encoding_utils.py create mode 100644 sdk/python/tests/unit/test_serialization_version.py diff --git a/go/internal/feast/onlinestore/onlinestore.go b/go/internal/feast/onlinestore/onlinestore.go index 64a05f144c..88cd3dbd9b 100644 --- a/go/internal/feast/onlinestore/onlinestore.go +++ b/go/internal/feast/onlinestore/onlinestore.go @@ -61,7 +61,7 @@ func NewOnlineStore(config *registry.RepoConfig) (OnlineStore, error) { onlineStore, err := NewSqliteOnlineStore(config.Project, config, config.OnlineStore) return onlineStore, err } else if onlineStoreType == "redis" { - onlineStore, err := 
NewRedisOnlineStore(config.Project, config.OnlineStore) + onlineStore, err := NewRedisOnlineStore(config.Project, config, config.OnlineStore) return onlineStore, err } else { return nil, fmt.Errorf("%s online store type is currently not supported; only redis and sqlite are supported", onlineStoreType) diff --git a/go/internal/feast/onlinestore/redisonlinestore.go b/go/internal/feast/onlinestore/redisonlinestore.go index 26f34cf896..8fb85085d4 100644 --- a/go/internal/feast/onlinestore/redisonlinestore.go +++ b/go/internal/feast/onlinestore/redisonlinestore.go @@ -6,6 +6,7 @@ import ( "encoding/binary" "errors" "fmt" + "github.com/feast-dev/feast/go/internal/feast/registry" "sort" "strconv" "strings" @@ -13,7 +14,7 @@ import ( "github.com/go-redis/redis/v8" "github.com/golang/protobuf/proto" "github.com/spaolacci/murmur3" - timestamppb "google.golang.org/protobuf/types/known/timestamppb" + "google.golang.org/protobuf/types/known/timestamppb" "github.com/feast-dev/feast/go/protos/feast/serving" "github.com/feast-dev/feast/go/protos/feast/types" @@ -37,10 +38,15 @@ type RedisOnlineStore struct { // Redis client connector client *redis.Client + + config *registry.RepoConfig } -func NewRedisOnlineStore(project string, onlineStoreConfig map[string]interface{}) (*RedisOnlineStore, error) { - store := RedisOnlineStore{project: project} +func NewRedisOnlineStore(project string, config *registry.RepoConfig, onlineStoreConfig map[string]interface{}) (*RedisOnlineStore, error) { + store := RedisOnlineStore{ + project: project, + config: config, + } var address []string var password string @@ -161,7 +167,7 @@ func (r *RedisOnlineStore) OnlineRead(ctx context.Context, entityKeys []*types.E redisKeyToEntityIndex := make(map[string]int) for i := 0; i < len(entityKeys); i++ { - var key, err = buildRedisKey(r.project, entityKeys[i]) + var key, err = buildRedisKey(r.project, entityKeys[i], r.config.EntityKeySerializationVersion) if err != nil { return nil, err } @@ -270,8 +276,8 @@ 
func (r *RedisOnlineStore) Destruct() { } -func buildRedisKey(project string, entityKey *types.EntityKey) (*[]byte, error) { - serKey, err := serializeEntityKey(entityKey) +func buildRedisKey(project string, entityKey *types.EntityKey, entityKeySerializationVersion int64) (*[]byte, error) { + serKey, err := serializeEntityKey(entityKey, entityKeySerializationVersion) if err != nil { return nil, err } @@ -279,7 +285,7 @@ func buildRedisKey(project string, entityKey *types.EntityKey) (*[]byte, error) return &fullKey, nil } -func serializeEntityKey(entityKey *types.EntityKey) (*[]byte, error) { +func serializeEntityKey(entityKey *types.EntityKey, entityKeySerializationVersion int64) (*[]byte, error) { // Serialize entity key to a bytestring so that it can be used as a lookup key in a hash table. // Ensure that we have the right amount of join keys and entity values @@ -316,7 +322,7 @@ func serializeEntityKey(entityKey *types.EntityKey) (*[]byte, error) { offset := (2 * len(keys)) + (i * 3) value := m[keys[i]].GetVal() - valueBytes, valueTypeBytes, err := serializeValue(value) + valueBytes, valueTypeBytes, err := serializeValue(value, entityKeySerializationVersion) if err != nil { return valueBytes, err } @@ -341,7 +347,7 @@ func serializeEntityKey(entityKey *types.EntityKey) (*[]byte, error) { return &entityKeyBuffer, nil } -func serializeValue(value interface{}) (*[]byte, types.ValueType_Enum, error) { +func serializeValue(value interface{}, entityKeySerializationVersion int64) (*[]byte, types.ValueType_Enum, error) { // TODO: Implement support for other types (at least the major types like ints, strings, bytes) switch x := (value).(type) { case *types.Value_StringVal: @@ -354,10 +360,16 @@ func serializeValue(value interface{}) (*[]byte, types.ValueType_Enum, error) { binary.LittleEndian.PutUint32(valueBuffer, uint32(x.Int32Val)) return &valueBuffer, types.ValueType_INT32, nil case *types.Value_Int64Val: - // TODO (woop): We unfortunately have to use 32 bit here for 
backward compatibility :( - valueBuffer := make([]byte, 4) - binary.LittleEndian.PutUint32(valueBuffer, uint32(x.Int64Val)) - return &valueBuffer, types.ValueType_INT64, nil + if entityKeySerializationVersion <= 1 { + // We unfortunately have to use 32 bit here for backward compatibility :( + valueBuffer := make([]byte, 4) + binary.LittleEndian.PutUint32(valueBuffer, uint32(x.Int64Val)) + return &valueBuffer, types.ValueType_INT64, nil + } else { + valueBuffer := make([]byte, 8) + binary.LittleEndian.PutUint64(valueBuffer, uint64(x.Int64Val)) + return &valueBuffer, types.ValueType_INT64, nil + } case nil: return nil, types.ValueType_INVALID, fmt.Errorf("could not detect type for %v", x) default: diff --git a/go/internal/feast/onlinestore/redisonlinestore_test.go b/go/internal/feast/onlinestore/redisonlinestore_test.go index 43cdbe06a2..ad9ef1e1e4 100644 --- a/go/internal/feast/onlinestore/redisonlinestore_test.go +++ b/go/internal/feast/onlinestore/redisonlinestore_test.go @@ -1,6 +1,7 @@ package onlinestore import ( + "github.com/feast-dev/feast/go/internal/feast/registry" "testing" "github.com/stretchr/testify/assert" @@ -10,7 +11,11 @@ func TestNewRedisOnlineStore(t *testing.T) { var config = map[string]interface{}{ "connection_string": "redis://localhost:6379", } - store, err := NewRedisOnlineStore("test", config) + rc := ®istry.RepoConfig{ + OnlineStore: config, + EntityKeySerializationVersion: 2, + } + store, err := NewRedisOnlineStore("test", rc, config) assert.Nil(t, err) var opts = store.client.Options() assert.Equal(t, opts.Addr, "redis://localhost:6379") @@ -23,7 +28,11 @@ func TestNewRedisOnlineStoreWithPassword(t *testing.T) { var config = map[string]interface{}{ "connection_string": "redis://localhost:6379,password=secret", } - store, err := NewRedisOnlineStore("test", config) + rc := ®istry.RepoConfig{ + OnlineStore: config, + EntityKeySerializationVersion: 2, + } + store, err := NewRedisOnlineStore("test", rc, config) assert.Nil(t, err) var opts = 
store.client.Options() assert.Equal(t, opts.Addr, "redis://localhost:6379") @@ -34,7 +43,11 @@ func TestNewRedisOnlineStoreWithDB(t *testing.T) { var config = map[string]interface{}{ "connection_string": "redis://localhost:6379,db=1", } - store, err := NewRedisOnlineStore("test", config) + rc := ®istry.RepoConfig{ + OnlineStore: config, + EntityKeySerializationVersion: 2, + } + store, err := NewRedisOnlineStore("test", rc, config) assert.Nil(t, err) var opts = store.client.Options() assert.Equal(t, opts.Addr, "redis://localhost:6379") @@ -45,7 +58,11 @@ func TestNewRedisOnlineStoreWithSsl(t *testing.T) { var config = map[string]interface{}{ "connection_string": "redis://localhost:6379,ssl=true", } - store, err := NewRedisOnlineStore("test", config) + rc := ®istry.RepoConfig{ + OnlineStore: config, + EntityKeySerializationVersion: 2, + } + store, err := NewRedisOnlineStore("test", rc, config) assert.Nil(t, err) var opts = store.client.Options() assert.Equal(t, opts.Addr, "redis://localhost:6379") diff --git a/go/internal/feast/onlinestore/sqliteonlinestore.go b/go/internal/feast/onlinestore/sqliteonlinestore.go index 94ba0c0d56..1f407ad39c 100644 --- a/go/internal/feast/onlinestore/sqliteonlinestore.go +++ b/go/internal/feast/onlinestore/sqliteonlinestore.go @@ -16,7 +16,7 @@ import ( _ "github.com/mattn/go-sqlite3" "google.golang.org/protobuf/proto" - timestamppb "google.golang.org/protobuf/types/known/timestamppb" + "google.golang.org/protobuf/types/known/timestamppb" "github.com/feast-dev/feast/go/protos/feast/serving" "github.com/feast-dev/feast/go/protos/feast/types" @@ -24,15 +24,16 @@ import ( type SqliteOnlineStore struct { // Feast project name - project string - path string - db *sql.DB - db_mu sync.Mutex + project string + path string + db *sql.DB + db_mu sync.Mutex + repoConfig *registry.RepoConfig } // Creates a new sqlite online store object. onlineStoreConfig should have relative path of database file with respect to repoConfig.repoPath. 
func NewSqliteOnlineStore(project string, repoConfig *registry.RepoConfig, onlineStoreConfig map[string]interface{}) (*SqliteOnlineStore, error) { - store := SqliteOnlineStore{project: project} + store := SqliteOnlineStore{project: project, repoConfig: repoConfig} if db_path, ok := onlineStoreConfig["path"]; !ok { return nil, fmt.Errorf("cannot find sqlite path %s", db_path) } else { @@ -69,7 +70,7 @@ func (s *SqliteOnlineStore) OnlineRead(ctx context.Context, entityKeys []*types. in_query := make([]string, len(entityKeys)) serialized_entities := make([]interface{}, len(entityKeys)) for i := 0; i < len(entityKeys); i++ { - serKey, err := serializeEntityKey(entityKeys[i]) + serKey, err := serializeEntityKey(entityKeys[i], s.repoConfig.EntityKeySerializationVersion) if err != nil { return nil, err } diff --git a/go/internal/feast/registry/repoconfig.go b/go/internal/feast/registry/repoconfig.go index 59d125b1bf..b034b632dc 100644 --- a/go/internal/feast/registry/repoconfig.go +++ b/go/internal/feast/registry/repoconfig.go @@ -30,6 +30,8 @@ type RepoConfig struct { Flags map[string]interface{} `json:"flags"` // RepoPath RepoPath string `json:"repo_path"` + // EntityKeySerializationVersion + EntityKeySerializationVersion int64 `json:"entity_key_serialization_version"` } type RegistryConfig struct { diff --git a/java/serving/src/main/java/feast/serving/config/ApplicationProperties.java b/java/serving/src/main/java/feast/serving/config/ApplicationProperties.java index 268592d20a..5850eb6483 100644 --- a/java/serving/src/main/java/feast/serving/config/ApplicationProperties.java +++ b/java/serving/src/main/java/feast/serving/config/ApplicationProperties.java @@ -38,22 +38,84 @@ /** Feast Serving properties. 
*/ public class ApplicationProperties { private static final Logger log = org.slf4j.LoggerFactory.getLogger(ApplicationProperties.class); + private FeastProperties feast; + private GrpcServer grpc; + private RestServer rest; - public static class FeastProperties { - /* Feast Serving build version */ - @NotBlank private String version = "unknown"; + public FeastProperties getFeast() { + return feast; + } - public void setRegistry(String registry) { - this.registry = registry; + public void setFeast(FeastProperties feast) { + this.feast = feast; + } + + public GrpcServer getGrpc() { + return grpc; + } + + public void setGrpc(GrpcServer grpc) { + this.grpc = grpc; + } + + public RestServer getRest() { + return rest; + } + + public void setRest(RestServer rest) { + this.rest = rest; + } + + /** + * Validates all FeastProperties. This method runs after properties have been initialized and + * individually and conditionally validates each class. + */ + @PostConstruct + public void validate() { + ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); + Validator validator = factory.getValidator(); + + // Validate root fields in FeastProperties + Set> violations = validator.validate(this); + if (!violations.isEmpty()) { + throw new ConstraintViolationException(violations); } + } + public enum StoreType { + REDIS, + REDIS_CLUSTER; + } + + public static class FeastProperties { + /* Feast Serving build version */ + @NotBlank private String version = "unknown"; @NotBlank private String registry; + @NotBlank private String project; + private int registryRefreshInterval; + private int entityKeySerializationVersion; + /** Name of the active store configuration (only one store can be active at a time). */ + @NotBlank private String activeStore; + /** + * Collection of store configurations. The active store is selected by the "activeStore" field. + */ + @JsonMerge(OptBoolean.FALSE) + private List stores = new ArrayList<>(); + /* Metric tracing properties. 
*/ + private TracingProperties tracing; + /* Feast Audit Logging properties */ + @NotNull private LoggingProperties logging; + private String gcpProject; + private String awsRegion; + private String transformationServiceEndpoint; public String getRegistry() { return registry; } - @NotBlank private String project; + public void setRegistry(String registry) { + this.registry = registry; + } public String getProject() { return project; @@ -63,8 +125,6 @@ public void setProject(final String project) { this.project = project; } - private int registryRefreshInterval; - public int getRegistryRefreshInterval() { return registryRefreshInterval; } @@ -73,6 +133,14 @@ public void setRegistryRefreshInterval(int registryRefreshInterval) { this.registryRefreshInterval = registryRefreshInterval; } + public int getEntityKeySerializationVersion() { + return entityKeySerializationVersion; + } + + public void setEntityKeySerializationVersion(int entityKeySerializationVersion) { + this.entityKeySerializationVersion = entityKeySerializationVersion; + } + /** * Finds and returns the active store * @@ -92,25 +160,6 @@ public void setActiveStore(String activeStore) { this.activeStore = activeStore; } - /** Name of the active store configuration (only one store can be active at a time). */ - @NotBlank private String activeStore; - - /** - * Collection of store configurations. The active store is selected by the "activeStore" field. - */ - @JsonMerge(OptBoolean.FALSE) - private List stores = new ArrayList<>(); - - /* Metric tracing properties. */ - private TracingProperties tracing; - - /* Feast Audit Logging properties */ - @NotNull private LoggingProperties logging; - - public void setStores(List stores) { - this.stores = stores; - } - /** * Gets Serving store configuration as a list of {@link Store}. * @@ -120,6 +169,10 @@ public List getStores() { return stores; } + public void setStores(List stores) { + this.stores = stores; + } + /** * Gets Feast Serving build version. 
* @@ -129,10 +182,6 @@ public String getVersion() { return version; } - public void setTracing(TracingProperties tracing) { - this.tracing = tracing; - } - /** * Gets tracing properties * @@ -142,6 +191,10 @@ public TracingProperties getTracing() { return tracing; } + public void setTracing(TracingProperties tracing) { + this.tracing = tracing; + } + /** * Gets logging properties * @@ -151,8 +204,6 @@ public LoggingProperties getLogging() { return logging; } - private String gcpProject; - public String getGcpProject() { return gcpProject; } @@ -161,17 +212,13 @@ public void setGcpProject(String gcpProject) { this.gcpProject = gcpProject; } - public void setAwsRegion(String awsRegion) { - this.awsRegion = awsRegion; - } - - private String awsRegion; - public String getAwsRegion() { return awsRegion; } - private String transformationServiceEndpoint; + public void setAwsRegion(String awsRegion) { + this.awsRegion = awsRegion; + } public String getTransformationServiceEndpoint() { return transformationServiceEndpoint; @@ -182,16 +229,6 @@ public void setTransformationServiceEndpoint(String transformationServiceEndpoin } } - private FeastProperties feast; - - public void setFeast(FeastProperties feast) { - this.feast = feast; - } - - public FeastProperties getFeast() { - return feast; - } - /** Store configuration class for database that this Feast Serving uses. 
*/ public static class Store { @@ -327,30 +364,6 @@ public void setServer(Server server) { } } - private GrpcServer grpc; - private RestServer rest; - - public GrpcServer getGrpc() { - return grpc; - } - - public void setGrpc(GrpcServer grpc) { - this.grpc = grpc; - } - - public RestServer getRest() { - return rest; - } - - public void setRest(RestServer rest) { - this.rest = rest; - } - - public enum StoreType { - REDIS, - REDIS_CLUSTER; - } - /** Trace metric collection properties */ public static class TracingProperties { @@ -417,20 +430,4 @@ public void setServiceName(String serviceName) { this.serviceName = serviceName; } } - - /** - * Validates all FeastProperties. This method runs after properties have been initialized and - * individually and conditionally validates each class. - */ - @PostConstruct - public void validate() { - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - - // Validate root fields in FeastProperties - Set> violations = validator.validate(this); - if (!violations.isEmpty()) { - throw new ConstraintViolationException(violations); - } - } } diff --git a/java/serving/src/main/java/feast/serving/config/ServingServiceConfigV2.java b/java/serving/src/main/java/feast/serving/config/ServingServiceConfigV2.java index 4ea0692ccd..868e3b83d1 100644 --- a/java/serving/src/main/java/feast/serving/config/ServingServiceConfigV2.java +++ b/java/serving/src/main/java/feast/serving/config/ServingServiceConfigV2.java @@ -48,7 +48,8 @@ public ServingServiceV2 registryBasedServingServiceV2( new OnlineRetriever( applicationProperties.getFeast().getProject(), redisClusterClient, - new EntityKeySerializerV2()); + new EntityKeySerializerV2( + applicationProperties.getFeast().getEntityKeySerializationVersion())); break; case REDIS: RedisClientAdapter redisClient = RedisClient.create(store.getRedisConfig()); @@ -57,7 +58,8 @@ public ServingServiceV2 registryBasedServingServiceV2( new 
OnlineRetriever( applicationProperties.getFeast().getProject(), redisClient, - new EntityKeySerializerV2()); + new EntityKeySerializerV2( + applicationProperties.getFeast().getEntityKeySerializationVersion())); break; default: throw new RuntimeException( diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializerV2.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializerV2.java index 3e9ab7e8ab..f99e5cbdb1 100644 --- a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializerV2.java +++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializerV2.java @@ -30,6 +30,15 @@ // https://github.com/feast-dev/feast/blob/b1ccf8dd1535f721aee8bea937ee38feff80bec5/sdk/python/feast/infra/key_encoding_utils.py#L22 // and must be kept up to date with any changes in that logic. public class EntityKeySerializerV2 implements EntityKeySerializer { + private final int entityKeySerializationVersion; + + public EntityKeySerializerV2() { + this(1); + } + + public EntityKeySerializerV2(int entityKeySerializationVersion) { + this.entityKeySerializationVersion = entityKeySerializationVersion; + } @Override public byte[] serialize(RedisProto.RedisKeyV2 entityKey) { @@ -83,7 +92,11 @@ public byte[] serialize(RedisProto.RedisKeyV2 entityKey) { we use `struct.pack(" encodeInteger(Integer value) { return Arrays.asList(ArrayUtils.toObject(buffer.array())); } + private List encodeLong(Long value) { + ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES); + buffer.order(ByteOrder.LITTLE_ENDIAN); + buffer.putLong(value); + + return Arrays.asList(ArrayUtils.toObject(buffer.array())); + } + private List encodeString(String value) { byte[] stringBytes = value.getBytes(StandardCharsets.UTF_8); return encodeBytes(stringBytes); diff --git a/sdk/python/feast/infra/key_encoding_utils.py 
b/sdk/python/feast/infra/key_encoding_utils.py index 8333610473..62b6b72724 100644 --- a/sdk/python/feast/infra/key_encoding_utils.py +++ b/sdk/python/feast/infra/key_encoding_utils.py @@ -6,7 +6,9 @@ from feast.protos.feast.types.Value_pb2 import ValueType -def _serialize_val(value_type, v: ValueProto) -> Tuple[bytes, int]: +def _serialize_val( + value_type, v: ValueProto, entity_key_serialization_version=1 +) -> Tuple[bytes, int]: if value_type == "string_val": return v.string_val.encode("utf8"), ValueType.STRING elif value_type == "bytes_val": @@ -14,14 +16,16 @@ def _serialize_val(value_type, v: ValueProto) -> Tuple[bytes, int]: elif value_type == "int32_val": return struct.pack(" bytes: """ - Serialize keys to a bytestring so it can be used to prefix-scan through items stored in the online store + Serialize keys to a bytestring, so it can be used to prefix-scan through items stored in the online store using serialize_entity_key. This encoding is a partial implementation of serialize_entity_key, only operating on the keys of entities, @@ -35,7 +39,9 @@ def serialize_entity_key_prefix(entity_keys: List[str]) -> bytes: return b"".join(output) -def serialize_entity_key(entity_key: EntityKeyProto) -> bytes: +def serialize_entity_key( + entity_key: EntityKeyProto, entity_key_serialization_version=1 +) -> bytes: """ Serialize entity key to a bytestring so it can be used as a lookup key in a hash table. 
@@ -54,7 +60,11 @@ def serialize_entity_key(entity_key: EntityKeyProto) -> bytes: output.append(struct.pack(" OnlineStore: return online_store_class() -def _redis_key(project: str, entity_key: EntityKeyProto) -> bytes: - key: List[bytes] = [serialize_entity_key(entity_key), project.encode("utf-8")] +def _redis_key( + project: str, entity_key: EntityKeyProto, entity_key_serialization_version=1 +) -> bytes: + key: List[bytes] = [ + serialize_entity_key( + entity_key, + entity_key_serialization_version=entity_key_serialization_version, + ), + project.encode("utf-8"), + ] return b"".join(key) @@ -40,10 +48,17 @@ def _mmh3(key: str): return bytes.fromhex(struct.pack(" str: +def compute_entity_id( + entity_key: EntityKeyProto, entity_key_serialization_version=1 +) -> str: """ Compute Entity id given Feast Entity Key for online stores. Remember that Entity here refers to `EntityKeyProto` which is used in some online stores to encode the keys. It has nothing to do with the Entity concept we have in Feast. 
""" - return mmh3.hash_bytes(serialize_entity_key(entity_key)).hex() + return mmh3.hash_bytes( + serialize_entity_key( + entity_key, + entity_key_serialization_version=entity_key_serialization_version, + ) + ).hex() diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index 60fa9265ca..da458a3693 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -199,7 +199,11 @@ def online_write_batch( # TODO: investigate if check and set is a better approach rather than pulling all entity ts and then setting # it may be significantly slower but avoids potential (rare) race conditions for entity_key, _, _, _ in data: - redis_key_bin = _redis_key(project, entity_key) + redis_key_bin = _redis_key( + project, + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) keys.append(redis_key_bin) pipe.hmget(redis_key_bin, ts_key) prev_event_timestamps = pipe.execute() @@ -268,7 +272,11 @@ def online_read( keys = [] for entity_key in entity_keys: - redis_key_bin = _redis_key(project, entity_key) + redis_key_bin = _redis_key( + project, + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) keys.append(redis_key_bin) with client.pipeline(transaction=False) as pipe: for redis_key_bin in keys: diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 6689897d14..a880cef050 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -95,7 +95,10 @@ def online_write_batch( with conn: for entity_key, values, timestamp, created_ts in data: - entity_key_bin = serialize_entity_key(entity_key) + entity_key_bin = serialize_entity_key( + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) timestamp = to_naive_utc(timestamp) if created_ts is not None: created_ts 
= to_naive_utc(created_ts) @@ -153,7 +156,13 @@ def online_read( f"FROM {_table_id(config.project, table)} " f"WHERE entity_key IN ({','.join('?' * len(entity_keys))}) " f"ORDER BY entity_key", - [serialize_entity_key(entity_key) for entity_key in entity_keys], + [ + serialize_entity_key( + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) + for entity_key in entity_keys + ], ) rows = cur.fetchall() @@ -161,7 +170,10 @@ def online_read( k: list(group) for k, group in itertools.groupby(rows, key=lambda r: r[0]) } for entity_key in entity_keys: - entity_key_bin = serialize_entity_key(entity_key) + entity_key_bin = serialize_entity_key( + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) res = {} res_ts = None for _, feature_name, val_bin, ts in rows.get(entity_key_bin, []): diff --git a/sdk/python/feast/infra/utils/hbase_utils.py b/sdk/python/feast/infra/utils/hbase_utils.py index 78a39caed8..4816a60087 100644 --- a/sdk/python/feast/infra/utils/hbase_utils.py +++ b/sdk/python/feast/infra/utils/hbase_utils.py @@ -167,13 +167,16 @@ def main(): table = connection.table("test_hbase_driver_hourly_stats") row_keys = [ serialize_entity_key( - EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1004)]) + EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1004)]), + entity_key_serialization_version=2, ).hex(), serialize_entity_key( - EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1005)]) + EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1005)]), + entity_key_serialization_version=2, ).hex(), serialize_entity_key( - EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1024)]) + EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1024)]), + entity_key_serialization_version=2, ).hex(), ] rows = table.rows(row_keys) diff --git a/sdk/python/feast/on_demand_feature_view.py 
b/sdk/python/feast/on_demand_feature_view.py index b4c136ab13..bb45dd6eb6 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -348,6 +348,7 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): sources.append( RequestSource.from_proto(on_demand_source.request_data_source) ) + on_demand_feature_view_obj = cls( name=on_demand_feature_view_proto.spec.name, schema=[ diff --git a/sdk/python/feast/registry.py b/sdk/python/feast/registry.py index c501a42c18..336bb2429f 100644 --- a/sdk/python/feast/registry.py +++ b/sdk/python/feast/registry.py @@ -1180,6 +1180,7 @@ def apply_feature_view( else: del existing_feature_views_of_same_type[idx] break + existing_feature_views_of_same_type.append(feature_view_proto) if commit: self.commit() diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 37e2cf95e5..2947f10f54 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -1,5 +1,6 @@ import logging import os +import warnings from pathlib import Path from typing import Any @@ -25,6 +26,8 @@ from feast.importer import import_class from feast.usage import log_exceptions +warnings.simplefilter("once", RuntimeWarning) + _logger = logging.getLogger(__name__) # These dict exists so that: @@ -35,7 +38,6 @@ "lambda": "feast.infra.materialization.lambda.lambda_engine.LambdaMaterializationEngine", } - ONLINE_STORE_CLASS_FOR_TYPE = { "sqlite": "feast.infra.online_stores.sqlite.SqliteOnlineStore", "datastore": "feast.infra.online_stores.datastore.DatastoreOnlineStore", @@ -139,6 +141,17 @@ class RepoConfig(FeastBaseModel): go_feature_retrieval: Optional[bool] = False + entity_key_serialization_version: StrictInt = 1 + """ Entity key serialization version: This version is used to control what serialization scheme is + used when writing data to the online store. + A value <= 1 uses the serialization scheme used by feast up to Feast 0.22. 
+ A value of 2 uses a newer serialization scheme, supported as of Feast 0.23. + The main difference between the two scheme is that the serialization scheme v1 stored `long` values as `int`s, + which would result in errors trying to serialize a range of values. + v2 fixes this error, but v1 is kept around to ensure backwards compatibility - specifically the ability to read + feature values for entities that have already been written into the online store. + """ + def __init__(self, **data: Any): super().__init__(**data) @@ -178,6 +191,16 @@ def __init__(self, **data: Any): self.feature_server["type"] )(**self.feature_server) + if self.entity_key_serialization_version <= 1: + warnings.warn( + "`entity_key_serialization_version` is either not specified in the feature_store.yaml, " + "or is specified to a value <= 1." + "This serialization version may cause errors when trying to write fields with the `Long` data type" + " into the online store. Specifying `entity_key_serialization_version` to 2 is recommended for" + " new projects. 
", + RuntimeWarning, + ) + def get_registry_config(self): if isinstance(self.registry, str): return RegistryConfig(path=self.registry) diff --git a/sdk/python/feast/templates/aws/feature_store.yaml b/sdk/python/feast/templates/aws/feature_store.yaml index 27d1c6879f..3745a75347 100644 --- a/sdk/python/feast/templates/aws/feature_store.yaml +++ b/sdk/python/feast/templates/aws/feature_store.yaml @@ -12,3 +12,4 @@ offline_store: user: %REDSHIFT_USER% s3_staging_location: %REDSHIFT_S3_STAGING_LOCATION% iam_role: %REDSHIFT_IAM_ROLE% +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/gcp/feature_store.yaml b/sdk/python/feast/templates/gcp/feature_store.yaml index 14c8d5a94f..74ee729090 100644 --- a/sdk/python/feast/templates/gcp/feature_store.yaml +++ b/sdk/python/feast/templates/gcp/feature_store.yaml @@ -1,3 +1,4 @@ project: my_project registry: data/registry.db -provider: gcp \ No newline at end of file +provider: gcp +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/hbase/feature_store.yaml b/sdk/python/feast/templates/hbase/feature_store.yaml index 83ce237b71..f99e858f7c 100644 --- a/sdk/python/feast/templates/hbase/feature_store.yaml +++ b/sdk/python/feast/templates/hbase/feature_store.yaml @@ -5,3 +5,4 @@ online_store: type: hbase host: 127.0.0.1 port: 9090 +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/local/feature_store.yaml b/sdk/python/feast/templates/local/feature_store.yaml index dcbe32d943..fddde04f90 100644 --- a/sdk/python/feast/templates/local/feature_store.yaml +++ b/sdk/python/feast/templates/local/feature_store.yaml @@ -2,4 +2,5 @@ project: my_project registry: data/registry.db provider: local online_store: - path: data/online_store.db \ No newline at end of file + path: data/online_store.db +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/minimal/feature_store.yaml b/sdk/python/feast/templates/minimal/feature_store.yaml index 
2083288ad7..9808690005 100644 --- a/sdk/python/feast/templates/minimal/feature_store.yaml +++ b/sdk/python/feast/templates/minimal/feature_store.yaml @@ -2,4 +2,5 @@ project: my_project registry: /path/to/registry.db provider: local online_store: - path: /path/to/online_store.db \ No newline at end of file + path: /path/to/online_store.db +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/postgres/feature_store.yaml b/sdk/python/feast/templates/postgres/feature_store.yaml index 53b86b7064..0ccd4a6d49 100644 --- a/sdk/python/feast/templates/postgres/feature_store.yaml +++ b/sdk/python/feast/templates/postgres/feature_store.yaml @@ -25,3 +25,4 @@ offline_store: db_schema: DB_SCHEMA user: DB_USERNAME password: DB_PASSWORD +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/snowflake/feature_store.yaml b/sdk/python/feast/templates/snowflake/feature_store.yaml index 9757ea2ead..948869897b 100644 --- a/sdk/python/feast/templates/snowflake/feature_store.yaml +++ b/sdk/python/feast/templates/snowflake/feature_store.yaml @@ -9,3 +9,4 @@ offline_store: role: SNOWFLAKE_ROLE warehouse: SNOWFLAKE_WAREHOUSE database: SNOWFLAKE_DATABASE +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/spark/feature_store.yaml b/sdk/python/feast/templates/spark/feature_store.yaml index 2ea0ddfcc9..91e3ecf472 100644 --- a/sdk/python/feast/templates/spark/feature_store.yaml +++ b/sdk/python/feast/templates/spark/feature_store.yaml @@ -12,3 +12,4 @@ offline_store: spark.sql.session.timeZone: "UTC" online_store: path: data/online_store.db +entity_key_serialization_version: 2 diff --git a/sdk/python/tests/integration/online_store/test_e2e_local.py b/sdk/python/tests/integration/online_store/test_e2e_local.py index 46198a4062..34758a50d0 100644 --- a/sdk/python/tests/integration/online_store/test_e2e_local.py +++ b/sdk/python/tests/integration/online_store/test_e2e_local.py @@ -51,7 +51,7 @@ def _assert_online_features( 
.values[0] .float_val > 0 - ) + ), response.to_dict() result = response.to_dict() assert len(result) == 5 diff --git a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py index 25eb061930..07e22017b5 100644 --- a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py +++ b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py @@ -311,7 +311,7 @@ def test_write_batch_non_duplicates(repo_config, dynamodb_online_store): table_instance = dynamodb_resource.Table(f"{PROJECT}.{dynamodb_tbl}") # Insert duplicate data dynamodb_online_store._write_batch_non_duplicates( - table_instance, data + data_duplicate, progress=None + table_instance, data + data_duplicate, None, repo_config ) # Request more items than inserted response = table_instance.scan(Limit=20) diff --git a/sdk/python/tests/unit/infra/test_key_encoding_utils.py b/sdk/python/tests/unit/infra/test_key_encoding_utils.py new file mode 100644 index 0000000000..449d6819a1 --- /dev/null +++ b/sdk/python/tests/unit/infra/test_key_encoding_utils.py @@ -0,0 +1,30 @@ +import pytest + +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto + + +def test_serialize_entity_key(): + # Should be fine + serialize_entity_key( + EntityKeyProto( + join_keys=["user"], entity_values=[ValueProto(int64_val=int(2 ** 15))] + ), + entity_key_serialization_version=2, + ) + # True int64, but should also be fine. + serialize_entity_key( + EntityKeyProto( + join_keys=["user"], entity_values=[ValueProto(int64_val=int(2 ** 31))] + ), + entity_key_serialization_version=2, + ) + + # Old serialization scheme, should fail. 
+ with pytest.raises(BaseException): + serialize_entity_key( + EntityKeyProto( + join_keys=["user"], entity_values=[ValueProto(int64_val=int(2 ** 31))] + ), + ) diff --git a/sdk/python/tests/unit/test_serialization_version.py b/sdk/python/tests/unit/test_serialization_version.py new file mode 100644 index 0000000000..00562e4000 --- /dev/null +++ b/sdk/python/tests/unit/test_serialization_version.py @@ -0,0 +1,17 @@ +import tempfile + +from assertpy import assertpy + +from feast import RepoConfig + + +def test_registry_entity_serialization_version(): + with tempfile.TemporaryDirectory() as tmpdir: + r = RepoConfig( + project="prompt_dory", + provider="local", + online_store="redis", + registry=f"{tmpdir}/registry.db", + entity_key_serialization_version=2, + ) + assertpy.assert_that(r.entity_key_serialization_version).is_equal_to(2) diff --git a/sdk/python/tests/utils/cli_utils.py b/sdk/python/tests/utils/cli_utils.py index ee6ea138fb..a038b85840 100644 --- a/sdk/python/tests/utils/cli_utils.py +++ b/sdk/python/tests/utils/cli_utils.py @@ -84,13 +84,11 @@ def local_repo(self, example_repo_py: str, offline_store: str): repo_example.write_text(example_repo_py) result = self.run(["apply"], cwd=repo_path) - assert ( - result.returncode == 0 - ), f"stdout: {result.stdout}\n stderr: {result.stderr}" + print(f"Apply: stdout: {str(result.stdout)}\n stderr: {str(result.stderr)}") + assert result.returncode == 0 yield FeatureStore(repo_path=str(repo_path), config=None) result = self.run(["teardown"], cwd=repo_path) - assert ( - result.returncode == 0 - ), f"stdout: {result.stdout}\n stderr: {result.stderr}" + print(f"Apply: stdout: {str(result.stdout)}\n stderr: {str(result.stderr)}") + assert result.returncode == 0 diff --git a/sdk/python/tests/utils/online_store_utils.py b/sdk/python/tests/utils/online_store_utils.py index f72b4d5a2a..9cd7663869 100644 --- a/sdk/python/tests/utils/online_store_utils.py +++ b/sdk/python/tests/utils/online_store_utils.py @@ -45,7 +45,7 @@ def 
_insert_data_test_table(data, project, tbl_name, region): dynamodb_resource = boto3.resource("dynamodb", region_name=region) table_instance = dynamodb_resource.Table(f"{project}.{tbl_name}") for entity_key, features, timestamp, created_ts in data: - entity_id = compute_entity_id(entity_key) + entity_id = compute_entity_id(entity_key, entity_key_serialization_version=2) with table_instance.batch_writer() as batch: batch.put_item( Item={ From 6d7b38a39024b7301c499c20cf4e7aef6137c47c Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Wed, 20 Jul 2022 16:48:07 -0400 Subject: [PATCH 44/73] docs: Include docs updates for release process. Clean up old docs (#2957) * docs: Include docs updates for release process. Clean up old references in docs Signed-off-by: Danny Chiao * clean up third integrations page Signed-off-by: Danny Chiao * clean up lint Signed-off-by: Danny Chiao * add clarification on credentials Signed-off-by: Danny Chiao --- README.md | 21 ++--- docs/community.md | 7 +- docs/getting-started/feast-workshop.md | 22 ++--- .../third-party-integrations.md | 48 +---------- docs/project/development-guide.md | 81 ------------------ docs/project/new_branch_part_1.png | Bin 0 -> 39422 bytes docs/project/new_branch_part_2.png | Bin 0 -> 144655 bytes docs/project/new_branch_part_3.png | Bin 0 -> 120888 bytes docs/project/new_branch_part_4.png | Bin 0 -> 111015 bytes docs/project/new_branch_part_5.png | Bin 0 -> 73064 bytes docs/project/release-process.md | 27 ++++++ docs/project/versioning-policy.md | 20 ++--- docs/roadmap.md | 22 ++--- .../unit/infra/test_key_encoding_utils.py | 6 +- 14 files changed, 63 insertions(+), 191 deletions(-) create mode 100644 docs/project/new_branch_part_1.png create mode 100644 docs/project/new_branch_part_2.png create mode 100644 docs/project/new_branch_part_3.png create mode 100644 docs/project/new_branch_part_4.png create mode 100644 docs/project/new_branch_part_5.png diff --git a/README.md b/README.md index 64f6c5940f..efe194687f 100644 
--- a/README.md +++ b/README.md @@ -139,7 +139,7 @@ The list below contains the functionality that contributors are planning to deve * Items below that are in development (or planned for development) will be indicated in parentheses. * We welcome contribution to all items in the roadmap! -* Want to speak to a Feast contributor? We are more than happy to jump on a call. Please schedule a time using [Calendly](https://calendly.com/d/x2ry-g5bb/meet-with-feast-team). +* Have questions about the roadmap? Go to the Slack channel to ask on #feast-development * **Data Sources** * [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake) @@ -151,7 +151,6 @@ The list below contains the functionality that contributors are planning to deve * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/data-sources/postgres) * [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/data-sources/spark) * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) - * [ ] HTTP source * **Offline Stores** * [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake) * [x] [Redshift](https://docs.feast.dev/reference/offline-stores/redshift) @@ -184,30 +183,20 @@ The list below contains the functionality that contributors are planning to deve * **Deployments** * [x] AWS Lambda (Alpha release. See [RFC](https://docs.google.com/document/d/1eZWKWzfBif66LDN32IajpaG-j82LSHCCOzY6R7Ax7MI/edit)) * [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) - * [ ] Cloud Run - * [ ] KNative * **Feature Serving** * [x] Python Client - * [x] REST Feature Server (Python) (Alpha release. 
See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) - * [x] gRPC Feature Server (Java) (See [#1497](https://github.com/feast-dev/feast/issues/1497)) - * [x] Push API - * [ ] Java Client - * [ ] Go Client - * [ ] Delete API - * [ ] Feature Logging (for training) + * [x] REST Feature Server (Python) (See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) + * [x] REST / gRPC Feature Server (Go) (Alpha release. See [docs](https://docs.feast.dev/reference/feature-servers/go-feature-retrieval) + * [x] gRPC Feature Server (Java) (Alpha release. See [#1497](https://github.com/feast-dev/feast/issues/1497)) * **Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** * [x] Data profiling and validation (Great Expectations) - * [ ] Training-serving skew detection (in progress) - * [ ] Metric production - * [ ] Drift detection * **Feature Discovery and Governance** * [x] Python SDK for browsing feature registry * [x] CLI for browsing feature registry * [x] Model-centric feature tracking (feature services) * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) + * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) * [x] Feast Web UI (Alpha release. See [documentation](https://docs.feast.dev/reference/alpha-web-ui)) - * [ ] REST API for browsing feature registry - ## 🎓 Important Resources diff --git a/docs/community.md b/docs/community.md index c0ead3dda1..dc1cc8a0fe 100644 --- a/docs/community.md +++ b/docs/community.md @@ -1,16 +1,11 @@ # Community -{% hint style="success" %} -**Speak to us:** Have a question, feature request, idea, or just looking to speak to a real person? 
Set up a meeting with a Feast maintainer over [here](https://calendly.com/d/x2ry-g5bb/meet-with-feast-team)! -{% endhint %} - ## Links & Resources * [Slack](https://slack.feast.dev): Feel free to ask questions or say hello! * [Mailing list](https://groups.google.com/d/forum/feast-dev): We have both a user and developer mailing list. * Feast users should join [feast-discuss@googlegroups.com](mailto:feast-discuss@googlegroups.com) group by clicking [here](https://groups.google.com/g/feast-discuss). * Feast developers should join [feast-dev@googlegroups.com](mailto:feast-dev@googlegroups.com) group by clicking [here](https://groups.google.com/d/forum/feast-dev). - * People interested in the Feast community newsletter should join feast-announce by clicking [here](https://groups.google.com/d/forum/feast-announce). * [Community Calendar](https://calendar.google.com/calendar/u/0?cid=ZTFsZHVhdGM3MDU3YTJucTBwMzNqNW5rajBAZ3JvdXAuY2FsZW5kYXIuZ29vZ2xlLmNvbQ): Includes community calls and design meetings. * [Google Folder](https://drive.google.com/drive/u/0/folders/1jgMHOPDT2DvBlJeO9LCM79DP4lm4eOrR): This folder is used as a central repository for all Feast resources. For example: * Design proposals in the form of Request for Comments (RFC). @@ -27,7 +22,7 @@ ## Community Calls -We have a user and contributor community call every two weeks (Asia & US friendly). +We have a user and contributor community call every two weeks (US & EU friendly). {% hint style="info" %} Please join the above Feast user groups in order to see calendar invites to the community calls diff --git a/docs/getting-started/feast-workshop.md b/docs/getting-started/feast-workshop.md index c883625dac..8b6778c2d3 100644 --- a/docs/getting-started/feast-workshop.md +++ b/docs/getting-started/feast-workshop.md @@ -30,13 +30,15 @@ _See also:_ [_Feast quickstart_](https://docs.feast.dev/getting-started/quicksta These are meant mostly to be done in order, with examples building on previous concepts. 
-| Time (min) | Description | Module | -| :--------: | ----------------------------------------------------------------------- | --------------------------------------------------------------------------- | -| 30-45 | Setting up Feast projects & CI/CD + powering batch predictions | [Module 0](https://github.com/feast-dev/feast-workshop/tree/main/module\_0) | -| 15-20 | Streaming ingestion & online feature retrieval with Kafka, Spark, Redis | [Module 1](https://github.com/feast-dev/feast-workshop/tree/main/module\_1) | -| 10-15 | Real-time feature engineering with on demand transformations | [Module 2](https://github.com/feast-dev/feast-workshop/tree/main/module\_2) | -| TBD | Feature server deployment (embed, as a service, AWS Lambda) | TBD | -| TBD | Versioning features / models in Feast | TBD | -| TBD | Data quality monitoring in Feast | TBD | -| TBD | Batch transformations | TBD | -| TBD | Stream transformations | TBD | +See https://github.com/feast-dev/feast-workshop + +| Time (min) | Description | Module | +| :--------: | ----------------------------------------------------------------------- |-----------| +| 30-45 | Setting up Feast projects & CI/CD + powering batch predictions | Module 0 | +| 15-20 | Streaming ingestion & online feature retrieval with Kafka, Spark, Redis | Module 1 | +| 10-15 | Real-time feature engineering with on demand transformations | Module 2 | +| TBD | Feature server deployment (embed, as a service, AWS Lambda) | TBD | +| TBD | Versioning features / models in Feast | TBD | +| TBD | Data quality monitoring in Feast | TBD | +| TBD | Batch transformations | TBD | +| TBD | Stream transformations | TBD | diff --git a/docs/getting-started/third-party-integrations.md b/docs/getting-started/third-party-integrations.md index 8a862891f8..ef47a11029 100644 --- a/docs/getting-started/third-party-integrations.md +++ b/docs/getting-started/third-party-integrations.md @@ -11,55 +11,11 @@ Don't see your offline store or online store of choice here? 
Check out our guide ## Integrations -### **Data Sources** - -* [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake) -* [x] [Redshift source](https://docs.feast.dev/reference/data-sources/redshift) -* [x] [BigQuery source](https://docs.feast.dev/reference/data-sources/bigquery) -* [x] [Parquet file source](https://docs.feast.dev/reference/data-sources/file) -* [x] [Synapse source (community plugin)](https://github.com/Azure/feast-azure) -* [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) -* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/data-sources/postgres) -* [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/data-sources/spark) -* [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) -* [ ] HTTP source - -### Offline Stores - -* [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake) -* [x] [Redshift](https://docs.feast.dev/reference/offline-stores/redshift) -* [x] [BigQuery](https://docs.feast.dev/reference/offline-stores/bigquery) -* [x] [Synapse (community plugin)](https://github.com/Azure/feast-azure) -* [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) -* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/offline-stores/postgres) -* [x] [Trino (contrib plugin)](https://github.com/Shopify/feast-trino) -* [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/offline-stores/spark) -* [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) -* [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) - -### Online Stores - -* [x] [DynamoDB](https://docs.feast.dev/reference/online-stores/dynamodb) -* [x] [Redis](https://docs.feast.dev/reference/online-stores/redis) -* [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) -* [x] 
[SQLite](https://docs.feast.dev/reference/online-stores/sqlite) -* [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) -* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres) -* [x] [Custom online store support](https://docs.feast.dev/how-to-guides/adding-support-for-a-new-online-store) -* [x] [Cassandra / AstraDB](https://github.com/datastaxdevs/feast-cassandra-online-store) -* [ ] Bigtable (in progress) - -### **Deployments** - -* [x] AWS Lambda (Alpha release. See [guide](../reference/alpha-aws-lambda-feature-server.md) and [RFC](https://docs.google.com/document/d/1eZWKWzfBif66LDN32IajpaG-j82LSHCCOzY6R7Ax7MI/edit)) -* [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) -* [ ] Cloud Run -* [ ] KNative - +See [Functionality and Roadmap](../../README.md#-functionality-and-roadmap) ## Standards -In order for a plugin integration to be highlighted on this page, it must meet the following requirements: +In order for a plugin integration to be highlighted, it must meet the following requirements: 1. The plugin must have tests. Ideally it would use the Feast universal tests (see this [guide](../how-to-guides/adding-or-reusing-tests.md) for an example), but custom tests are fine. 2. The plugin must have some basic documentation on how it should be used. diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md index 6d5bee16af..58e29a5ca7 100644 --- a/docs/project/development-guide.md +++ b/docs/project/development-guide.md @@ -11,85 +11,6 @@ This guide is targeted at developers looking to contribute to Feast: > Learn How the Feast [Contributing Process](contributing.md) works. 
-## Project Structure - -Feast is composed of [multiple components](../getting-started/architecture-and-components/) distributed into multiple repositories: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
RepositoryDescriptionComponent(s)
Main Feast Repository - Hosts all required code to run Feast. This includes the Feast Python SDK - and Protobuf definitions. For legacy reasons this repository still contains - Terraform config and a Go Client for Feast. -
    -
  • Python SDK / CLI -
  • -
  • Protobuf APIs -
  • -
  • Documentation -
  • -
  • Go Client -
  • -
  • Terraform -
  • -
-
Feast Java - Java-specific Feast components. Includes the Feast Core Registry, Feast - Serving for serving online feature values, and the Feast Java Client for - retrieving feature values. -
    -
  • Core -
  • -
  • Serving -
  • -
  • Java Client -
  • -
-
Feast Spark - Feast Spark SDK & Feast Job Service for launching ingestion jobs and - for building training datasets with Spark -
    -
  • Spark SDK -
  • -
  • Job Service -
  • -
-
Feast Helm Chart - Helm Chart for deploying Feast on Kubernetes & Spark. -
    -
  • Helm Chart -
  • -
-
- ## Making a Pull Request {% hint style="info" %} @@ -148,5 +69,3 @@ The language specific bindings have to be regenerated when changes are made to t | :--- | :--- | :--- | | [Main Feast Repository](https://github.com/feast-dev/feast) | Python | Run `make compile-protos-python` to generate bindings | | [Main Feast Repository](https://github.com/feast-dev/feast) | Golang | Run `make compile-protos-go` to generate bindings | -| [Feast Java](https://github.com/feast-dev/feast-java) | Java | No action required: bindings are generated automatically during compilation. | - diff --git a/docs/project/new_branch_part_1.png b/docs/project/new_branch_part_1.png new file mode 100644 index 0000000000000000000000000000000000000000..e8e59d82140a49ab3ef155c005b53c74920a612b GIT binary patch literal 39422 zcmeFZbyQqU@HU7B3&DcB1}C_?ySvNagy8NFAhfY+UUDfi`)76PoQIbYQCP0RQfVqUnzmQ zAo7}=ER)iUiGV;J!In=Yp+P~c@Pk^+EObz{{DJlf6rTzLfANyJ^M$rU_bLU~#05@= z!tuh+a|hZ7ZAD9b!jEPNw9lcUI&r1qnleIPb91^lQAOlrHawH=I@pRWh-tL|IL1>Hu&xCG&K!f9JyIDM<0p z6MT^G&tet|vVR_NwG*V!QB)xlcXS4jaWTJTeoG;QOh!f~;A~;ZrzRowTOIP3AceK7 z>w7*H77q^(W)F5|M`tS*R$g9SmbYvyY-~)BCzxEk-np83GQD%5{I`%lxuU=S{(Vn?r_Fygy>t29EJy=cp6{@*GQVZ{TQ)>h;91J2 zV&e(0*OjnwfS3nThY$zHTY-O`|3BUNug3qOsq>#ECnx9s)chZJ{*RiPE&yk7M+Zoi zu0sEL=C|_yy!l&EfaSU6|HBghn)x3o#Lhy<0xW-hCWO4RCa(nrB?2WYA*$gC{VNB- zL_-ocj3pk%<{SsH{c0u3RYF3v%(bu}u!IZ;ou5oI6I`sFU{pvM@fu$2sQror5h*la zjL~u4&^<{}=9{Hpf$3@%_eECI-PlSCdls#q$#*PMGHA>{7cO=DoaADkWdamZ{J&Qq zjY#b`!XP-Az<;i;A|#cH=JVqkCn^z`f3MdJZ3w^b2ErL+LiK%=M?)i&{(YAUqVDsr zF68I`>(rlQYZH5W36%*9;NgVifvKv$+ z&IcPvNN6Yxi}wT#w(>)2MO50kZDp%z;I*KKI_qxV7LTi{ zc5*XMP&7U)c%u&`p>ZOE;}ZEjt=2@LA`6qATOx2 z5TUYUC|V)XE9iA9!ED@$XN;JyIiADsvDF=d;JkUuiG+qW=|$K!ucFuFz+$@B2AJ&M zK9x(OW6(4z1y)M@z-FjPR>*LQ7w05t>K*^<49xH8_zzs7>i&w~rd2f_{h{0<$d38R~a62x=%cYddm9hZ}*Y#r> zIN#01AGx`m`~c4V#2tPfr$Il_1Fv=JED~@i2dx-%UVesQ#WMUHeERESN-rplQoHKD znfdPWS8_~1)zrY<6})_J65`1f%J+bX3Zpa5A);i(v1}poaI}M@vgy5uxVXG*9)F$2 
zcaM{x^ISH7Ex?wAiq~ZaSLZFnO`k@#?6xi|vs!&O^On3&Y)zBt)NxvzHZz$SJO{Kk z>-@vO#VYHVaw1zZZrMU^29L{)L-(i)^tFAF9%{mV+cFgz6$X;VoqqeNFYoI+Z^%8s zr`V;M#jygaPtr5ZKGdRsNTRt2G;8poiH99O)B>6u`wsy|Se|*hE zAK5Qnbek)2mX+=Qb@~Rk0`CY}P4lsgT2X&Gn8j zky@UH_^Yd?x+Y+XV}L1x?IfU1Jta;>OHsN$ptwLl4JW<26OZ=j!rz$9a)g$6p?+eP zLEBG~UaO1|9vLTjbD-Oyyh5jAWJ%j<(Y|o8`u22la6OSF0DAW!$dBA(in@=_I0(l1evZ{Gt}^YyZZ^GIpQPS;UT;}@dTXLc{>6QGbX31l?aG9N*Y!9snov-0Iea!|SgYEB-!-{@cVgG42JofdHuF5WL+B=;6C`uN zw3%W%4UUm=97_K|2{yGZ@v@Jn<#U(_J0<;Rn3%#O(I`$;hFrjPqcf(8^+ik~<)6eE z)&@I%HeE+c!7590{=^8B7&PMb-U{?jXUcRG%*$gnw;ak~$fHuni9${(W57t%(9Yx3 zR~kwcY*NS)&{D>wmfxl%{_6jUe~vqp%it?w8JA%#jbk8C!Kne+*92Qt17ff6i&O@A87RA6>xwddbj;{;N-}YUzPu8 znsBFy9F&{K-h|@XVxUUNdyIqZg;p>1QCJwujrt5sbo5Wvo1};z`Ko7<#)ubeJMqup z-HyQ?Aw{WWtB5J(8Y$Z`grJ0;aOxay&wM3?Nz~hI{h&O!doFan28TDKm-~(u>i1Fm zj!%8^sfmtLuq55{T!gz66uCcrhmzjRUhH9I1+*n)9->GH&wj}7b0z$MhR>+}ChML1 z@UNz1Jn?|{#;+b8^&N(waeK*=RUDpN9{A`AZX=t&dK?Xgq2LyJMka1^kbsbQJPrjG zN;LNZkykj z9$R5zIx8pADAI&|OrEk~1B}tT?PYW@9nH5J_DA1JX;m8(R%5g1Dop@KKgH?SA6A4R z1M{Ws->S@dJuF}Y07H~u2CYe(Uk{tRISP>$%<8Bq`m1|AB-KquzXMZ^3DMe284&imVn)YoPFIO;Aee_}$gy?Ctr16%H2P#;))pm*Y(E zE0}@^OfGbPbP6e2-PUL!My<9`{|Bpmt*_rU2*LbproZrVUQCK`(rdpq_wJ`Hmnfa6 zJBG$W^cHTRRTDZzR3E~x zbteO~I-th2Ce}QMzRlluyF!NRP_+c=DO_1;c8*+|{f$riV-GBXI?wI|U~2W9s&;>L z#&LeJ>oM6k+cZE_z$EaZT$_2gjl&kbirWqhz?%{?5?iD0jKeZP=2D#)Pg^ zrKN5p?=DND!VHrkeJpTjmCZn7bxr#It%R^E>j6Lq9?}IXn7DOBgWpP`o^ok&eS_je z)%;{azDz@GdV*GgZjGRo)s+eR<29Z&eGf&(%g0NG`8J!RSleRYhpww11=6PnX~xz& zleGoXbO{q8iWwHE6SmaDr$zE3^OSaHK@WGMyV0-_914!)TdHWD3PofJcy|Q^KI(*T z`@*luv8nj^)iN4c91OYo_K-Di^D{T3(>_ZbBxNb6aA>M2K<9os^l=l4reM zfc=a>QWbb+-v=Grz7=l;g{jZFfj#XAzgLmI2q+#vdexiGCJvRimcxHud!ihuM~;vX z0VSV>k;xUH&p%7B`HKIxJ-HEJlHGSYwKvDVtG=^~ zS@^|0crZggDhQ`iZ2ZYIF?=G|^FnqtqMneO2b)*BAo z-t~3+P%2v_PPj-6cU=MW^?@K^A7~*msV-O?GwR5Jvz``yo;ev{sHLc%@2{U2<>3qx zXe9l|rnN5<8s^z@4p=Wa@lQ5+01tA&N~}%=JVzz7Dd_#4C!iAd03m`2G)l6=-09US zO>KUvqj9IZrg1L6Rin}86mDZ7rdX41k-5_;=OX3&o45iY!u;Mko(O8h+^Nz@8A~v! 
zc_{4NRq3?z8FfZoTNeV-`q-om4<)Lx>~n-;h}0$BS5xvSjz=n@v=jMM=*d+*0Uwr~ zVP_z-xvB$|S!?y)(fW-x&nQqSM@d&c819iawc9z8eIv$&CxC*(;fJUo`WM^PWQQ#7 z!JD(voU!IJ|E&CbEQJm{zJl9< z7Gt#65hzShC``-45r z8*yE7GOabNU^=X8fC;sXD`_e&vj7&$yvvrx7zkMO!}A&_o1``Cx3?u*z`*JsRwCLJ zf{nu*;)nzIAyV^Nm5<`Z)7obt&ubYb|FIlFhP=zWLBp-E#djx|6 zcpXa<{NR+~_dHStK!uDV)g<68!RcqE`#$Zt^6XFR5{sWqDKoA+jnB`q7-_G2a(U1h zYAg;^@0#8|hogaCu>w_sLMn^mka2)>e9Ka4A%=M=cUR)|q)5K9doy-aL+XW7Xxxa& z=gLgfjTF!{PCAAr8w2#jTcXXbC)u7}Ui-Ub+118h3>u&Anr_{FCD|4g=Ck>IkY($W zm{X-G3&-PSTd#hx82X$~Yh@0njg_N4z4t>^7fVn4Mhd4Ss_96;iEY8+DcDdJ8YcLC zQsHufjM--_dm(Y+XmY$h4uNE^hEJp1NZhKMp19(**c^77dZkJWt^ebr?Xs21F^Urv%g`XGPD;njczR4Fu;{6ZjKN z84-i9hd_EUxq&J~PI`H0gM%Zrc=c+}%BUq@i^3tvkl#bDfq?TOFD|Y+VQLRs z{cXA)Y!(4M)4f;C$wKOAmTPP4#oqpGN$AFA!~h@h^=;X-0SiHDxtA{7xz$voS1T0LpK>6byPoBALj|1@00GL^nCin`lBTwrP2=HGS(yXC)Tkhg!}CiBCUs^?H}g zXs%A4H0*Jx1e3+-Wh6Yc?0%vQzZyCz2%nnR_O)%Xk}tiNBW`%flY%2MJ1xXACx|-( z#q%U4FQCy{WSjBIWxQ8E`!6(|l^R`!o;L{tZzG58Y2E0q#&9m&G+d zxg^P6ZMfJphSsWdM%7XZYx)6dLKfF>|j##c6l@)hexQ%?W>sV|SZvicfCpPIKGoP?Q?WIm5N0{carOb9 zc*0e>d!T3HcGdeC*54ET*oG?Bt5}jP1M0oZ!{%9EX)&)88_5yW-3fv(4Aq-=*|^4- z|FUoF;!h+3H=27Bbqj1)oXwIe>MP4B2)`-^kM@jC;=G@EFyB4$zI&?QWuqOI<_Zcq zplB$sa`+mJ96h@BP3#C?)!q2y?Lld_b!{E|w?TCJ8klH2J_)IpgtjS1ae(2Gopj6E za|~AF_Zuc-BlxEe+{UHU8`AYMC))Dqtngvj!mLb_je3ZASF1kScj^Y}{-ioKuG+jg zW*`)X_-OEG<4v|8cXnt32}457%fndl}BD%uiq1DCit`4RLbV#C8L=xb6sLWF)=D%2-OTE*%yw5jX*pjtX&Q-1t zh^;+;I0egl6Vw`(r*7K_N?-I!W)V&Ic|L>Z2o#vPpOw;a?-Fi{L~6(s6}ZCWlZD>? 
zl+{2TC?jDs{cv29vteDD%DJOT*iOvvt#VMxrX*bW_5hX?%?BFXHDRV7+g_ip*IHb` z{=qS z5f~f3RfXjty3uFlI0K~^R7>sw9yF}U!Wbhd3u8DncGOZn(q zR68%;U;1(5Ll;U^O0x})JoTS~$e^*H!^?vZBmPjB5Rx(oLQ*nB!z})xFsat!Arf(p zw*LgkaI{(mrmyE-{C*!Y=x~UHD1ROPuRxnjq;>xw8XZUPjowJ`w6<{)pW8mD z-RFu*;66d4LPu69R{&$*t=&3^({?IZzv-Qvb%{^(LDZ><|8u{7uMI;|J6*fL)R9^r z)JLw6>n1F_t$+K3sg65{`D*tHr4Wh6B?xX%hM~4qR4gx-B)dT4rkm?5p$o z{34yN^{>N7Av1Q_8l*dFpH&YIt{YUm2vka8OSwNA=eju=P<4KKe9+lf-|SZ`v>aWi zvmB*w@j3&(W;W1S@hVQTUv8BY6B9G5%>9X@QK|-L@jMA8681N+x;Q!cae$15mKHLyXiQpAxCRHlp=WQ2sUbl&SVIxNx;r!sLCzYlZIAnf^v zgC+~&p+vWv6oDV+NY>X@9u7MoaGN!S_`W`3-~xp;TmrU8-8_9W6-V|R2GimR(=rhJQfKK@sL|=Jg}Ip zGIZYTM_vAQe}f|Pa9b@uT~-|tS6vN3&RR(Qh<=rN7X*pC>Xx#3LLrbn0}Y5v%RudA zjO;W~Ltl?BlkacNvZxGD+=&1-C-e zo~A-)F#X2CK_xI(X^+ozo=&4AzED2TsL5`&%7iJG&uxz)joDCbW7MKWJ_iEUD(}h; zg>O9%7C66550zYjQifZPz{A>8srlvF)tPsF|9TL5mI}T2Rbw1!m~6g8WD1!qV+W|S zP$Yx9$a{0Vfv4Ghy@p1uUZ6v}p~0+M+l+g9p-fhvFEK(%XF4W7uJX|gP6juBTv~RR!UeBw zfhIJ|Iu(TMKAklt%qzdSJXoA)^TO)xA^GgOJ2BDuRJu~#?Hq7q@mP0wRlNmj3ArWGN+b#^8>%Nq1wStD^BtD8YL(BE@pd@26TLEGP9gLq zqniYzWoavYgls_!rFJHc;4d#U@~J8tc!4XGEiQhIgRfWIZf`&53>PVOhHckpX!z-# zX~x@x5X#MoCvmRNG<;F}yy8&7!nu=C7Z|Gp3QSI(z;bbhfdRZf10E_ zC$bhzA}I9{hY3i!Hn?*{N(NzMQ{O1(T6ZQ7B^BG)fr4NWTs!m{Y#Bbgj8u^#N+=G* z=4fV6HgvuOEjFJhW31#3%v0>zbStmrJ2%8ct)G zd}Kpsj$H`*n8j&V6*^9b&t+foZeVY^e5(iTPA&?^arpYg+$}q=I&UObQ2e_#Tz!F9 z81kNPr_he@zYwBImQ1S;xq`$iYF4w{QGhxH6K{6n#c$G9JVK*J2A ztBFZ`U<`9JtU1Ob&u~QMluS|Nh4kS)RKY+6VGbhk} z{$sV@-KFi$x>?^x*$*%Aixe{0%5uiiGWk7>4#v$hHjg^FWha57BdYHuggn1fC1ZFU zuR)ip7Np+rq5X?+43b1DKBlOLa}E74WVq-#zAY-S<=f8Gtj-gNtl?AM&Qc>;X)>aN z3tu`8znVVXN!CjzTI$fz74o*NO>DX@cYc3|Sx4?vLbl^DcsL;L!X7hU}DLQU_FE_PcOrTSzY4N*@rBWvORLn*8 zjuAGM6akKkTYBCo?)%3s4pa-Jtu->6I*U`Cn7B&w{RitROt#)cRa79--Nk-&n#Iuy zamB5Vk+L{8gVvG^CYa$iu0!vIk6%*-;XwcEbbt^^(m(t5%{W0i7rKz3%m#ezy&a*q zZA+=~%k2)-0-MO};$JTRNXK5bOA-JOWWA}+7N}A6O@2R=*%a)aD%j?dq3ON3m5C{x zn>$1$nMp<|r%YHhd|u%@g|I*T0X_sgezUsUiHb6P5_|+UEx8C8Gl)PTGRfS4pp8kT zlGM?mbXFCq*n8Fcd*2lHJvxVr!F1LF0On8zd$Hc3&j>v;@J;bFhrAg$M59E-)CJ=h 
ze<+pd>&^Z4C}EkP|Lu6hNcPQqI-XN%^&r}tC8PG06x9MLP@9)agK!jpfJmB#v8E8; zui@pfab#tOe(JFKKZrc&ni{>_jJj1Zyd(%7Ua6kMNcIAhm#K&V`mtO-;?MkkLXfCU z%$8m_)n(w(jW)Puck$$5zX7(=QG}o!RFbUcch0jhUuQWLu2S{RgE~+xGOey|%J#Bo z%8rw&RWWY%d=jXrsxBb3fB^gIv%ml2?fT=q6|BW1FveR_!P7Hcs2@aL@$uyJRE@!o zV0W5JM-Yn;dQu89fK%Qr`^bd6aNN`6f4B<(GV(W#_VpmU-kbss$f8mtbmrCTEa=YM zd=Td5P-ik3b?f$q(M>P*rkmuvA8t8^vbgh10hWqIZ`@PUw--6 zB&f@|&+gSQZDRAZOX?kvCeEKqmzQQG#E5pdJSD_|E~G;rL+s9yyTDX#>1V5-ET7Xe zyHQ^KLUw<~q245jbFpp2Xf=>JIS1MsB2n3hkg z72qR>R|qr7geUCbMXph!lHV@yf};|EVK_5Asm#ncaUbXaWE>>!7~a2&VxkCr+Ijm?uoyHa74hP?6pf45A{W7*hNu2|Oqual2B%P{+R*BtDSvIWF!I1*H8^$$1XaC#~yPem80D z1Eh^h?^Wihe>eL58;GF}IWm8t{BBY6bD+;nq~RX<=ZzyF90Pw0&7Ih9+rE2_;s5{p z8iCRqy;BgAefrO2gwS!{5y&wyFxKBam<9aj#Lz%#iC%N=UF5H#fBN%UEF2HQR%`)C z=y$GKARz|CX9e9B+JAgT{G6(`SyHF`$7fQ{KC^y1kNwAI($7BQDf@->$7dMNJ|j?Y z5BuY@glC_{(%(t^@tFj~XE86549Wj=ooCADf4|269cw(Yn^8L@QmK7XA(zcJ!vE`b z%vaEH&GviRtEj0Gk(e3fp7+-YM7*vtkf;)oOh%Tn}^)FlXMvK3-lLUumvRH|Ar%A8`5(t)Rw0zuRP!WkG<)PTh@@Tb%MoDW$-A*nyElQ7_s zU3Ex2&Q#E^^j(Nb3cpW2dCCmv5%=!&3fQ^zYyJx#OVtj9zExgbYj*mwed`-jwj%uZ=am=ahLyDi_`Zq2mYHH(Zcx)f2rt%ej1TEQW)fW;;zT$_J_Y2gtcUzHITC%JUq1cMJEC%sC;jiQl1-!uDZqA%9 zM{IJctZO&4b{+0a+!EgWo-IXSe$b$^m{Gf^#5cy9?CsQMCeC*{R=ZkH%8g}n2_DC2 zkv$-XPU)~ zzzFg@(anJD(J!o8e8$0lujGeEKmagbw=%nK#QO%7@tnn%{z9zzQw1?1Bv)|1Xej-$ zn*$5ziTpGU_LZYNrWwYAAAZ3v{=VJ3-$45FxMG?5!Z?7xVJK-4|aNO$#V zD!#tG1;r4~0v2bOq$YNZYV8TS<&jc=Oe-^Y-N}rGQph;W_9-*R_V)PSQLHn6?>xP7?Bap(>pA9sQ9#4OgNQ4UW-@5u)V3_y}qlCafak54kfUsxqot4txYPFOVO(` z?xZPIFJ>+NFpdI;?B0;mBHq?7DT7nH96pCFN*_s5~tabtaFA@9y6lOoK&yA~o#7&6Zr zY4iP*PTJ!rX)lFS0t2@ePEWpt&A$$`p{$^1yVtU3*3SVdrYYT(nqXu64^=Fgw`SCb zsx|*@+Gta)O-zbZAx1m=!Frex#*kJ2WH_tgZ<4!25>%fYM-b~DFd8p%55a$t#w!&u zfBQow1Uuv-MGyUk!GUlaX%MCRQ%i^c#tQ@rU@Dp@{K3vt=$#OyjWljM%HPW)W&{lc z_gcf4JO4%?WRR#8qSV=@OZXdt#EWo1V2cD6Uee!Lx0eN?bPXO|_`5RZ%R*p!W>5xOFzl|SJ6xBkA~$uqY1_xi&1w{Gj_R?@ts`EPCd|Ac9D3}}?I zxMDaQ?o*=)+4X=*xwP#*9dBBk!M;@>4S$xcRUXR2)^Repwh{fnAz z_*GK<^v<3z5#_7dcpBNdb7X^jogmz-Fp=c@2Ge%mvkUF##SV>BDEVsTfVuopho*Wq 
zV6?34i`DqhBP;@yMv>@X24#4K{ui}63#2+Z?R*KRQqA%Yn%Q940x8skkL1G~jRU(A zKuG2x;V##L(dM77*b5KAK(rhkpmT);| zczX6Kg!td|iHPbmim!hQbIs`J=t%tmIbxuOe|%F60dO$*y!QyDs_m`QZn?kXFcEu< zh^*`|QTk;o#7qHtM2!C_IF%hVR2LgOO4D-XbK9e7M@ z!1Z^>S$iD>N3C^#T93O*G|I$hiHH_O3(02lBKL+t4j_~DWglv%Ov)Z3&O(a9p`+M* zbN{-40%8(eajLB?$41=E zbE4GfK&XHgznxf3K}ecVB2bYqYqN?rO7^TtKVv*cP=*AZ%#p)$`JJ;q!^dTbR}|Wn zMhi|uDi>lv~OX^dMQM0c4-3M_c8Z=)uNI$^OL{6zSe0N<{8RiXmTnsP8c&j zXm=L9sr@IF3__BM*kjiCv-MdH)oP>$bX1+tlaOhsSowaS2GKQ`bi+&(dsOd~F=;nO zC({|~aXHT4E^$(B3}Gq6M0)yo5vsC!x0h)VYgTCt(K~|Wuq(%6uy~gBKI}~!sd}>| zKp^fcTjs#O{o_Q5EWxvgp1z|+PwpdT2$e%a#x2@F#F?C-LIW+yOQ_m8$F$rRrwR$m zi7&L&m)*vcsaqT|>Cy#DUxRWLo`NPjlF-gqACo>i^KaE+{hXpF*W7tvkD3x<49}K% z+K_F0w+u|B4WJN52w<771Efsb&44cUx#7g%zfM;fH0W)QW*yb!ClxF0DsJg@EE`yk z-Y8xkF7Lc`u)uv>X_KsBBskD%vxw9v(@{0@Dk@uMh#%0JQ z4TqtFSD)T`thAGxp(uWQ(E>e=YG$__htmP&>E{X7jqM(_Zu73zYgkiIT&v^3wZO;v zS`V+=XkAQ_N0}Ahf}j<|S9j?|n!|~26muz7QAz0INHXrVe@#bBb3X__7<3SDsPNT0 zzPUlcZut6g!N_R183J)+F&e7nqP7*ml~RtjKhFC;RiDp1rS868`l3zRJ}a-HV%g$z zm3v=n7Uo-V8T$JOoF^|_n;Le)YwAw#)&}VZnbnr;4hkSLHB z4%a#wfK{WbpGmKtYm03MZI;1Y+j_2gc7EzUs*(ERRh(5-3%R(JacYE&PbG8NQ?-LV zLma7ryN;HDVV6?d%ib-27{uxy(u@fGiHyy$ zb+S3Y!ll6G;m*%tZa^et+%6{h+rp(KOV(Sb=bZzk$4Fio=rT0R{`=^ zJ$V_MXU^}5Ya>ylZp1AQ=crBPro-hE)tQvHdaf0q7a(tL)HwVxg=$6h?vn z)y_SBLqIryV7M*zn0NApv={y#9`!`Divd7hTplC|=BL(hC-NfuGPA<56QzW|t#IPG zR%a&J!Y&<2ZuercgF+{<&6r6wmYyu8)^~o8(1zsA0=kX=UoL@%CVq~gNS%{Pl|hY6 z!`=3?a}Ryo7l01n;{etyHiMGL@hL|}wQmv)&Py>3Xq$MP?mEhtp1Yk@E1x-=yz_2o^t+5*TJnr5)1N(-c21%Od|IL-dzoz7v<=d1R=%f_#cy_ z55Q4;%T?SlxnS_+G8JWoi((h+tBdA&_b&RQhi>iZ_2-62kkOGlNsD9b;Ura#h$-I1|<7peqY5JjBa#DDgi;zimp5x?5B+wopvExP>*YAp2h$zY`KmR>K| zxNfo4muhqGiUbh}FXs#rrOA>k3*rM#=J?^EVUxbGMBYCFUhQfK$5_Z70mia4i9NKO z&ZZg?y>cIDSMQ6xMv)GPcqU))&*Mq{Mz<$YYuzo_%KP6Y0P0;X7#(!-ouky~^gKroz+1v2MDxnC^bs$@VpjUS}QXHY%)PFVj6m#l$EJ9f%t~{+##RwaFtvos=^n zPMYj_dO6*HKqG68cc($5tf^~FwqQaIHF;sBmR&bD^c=EGt$*ad&L~S(iLK) z<5L=HzE6CN$q0J(SRLuUq9mb`GI1Yv3wKJss1=N>@)o$4aef<$LW>c9@YcL(d8y__ 
zfmWuZ+4N3)3&|_x&zi~?#D6D+*f`dNI@cmt7bx)0FzZA1Puf{;2*4J$E3!s8KOL2x zxXn`c?rFjncNr4KI+ZmWHqy0tlGas^$AOd@>AHOK@Kppd_h|`}&8a-2TT6v3M$$_! zV+gTD?4=H=4)ZPa{VPq*i{TaHm-hz#S%T`FGZm^2EhN~CDkAO(j%lrKw!$8xTrizb zhqQ*K7vedd0o2-^pRscS*%-9T@NyUx?tIbn_ z$N7p0trNbV*1p3yAP6TPuqyc=;b7R}DQLil#!uNkke$?dx%7ptzl-OK)0Lnfs!crC zsY%(mhQ}72`I+sd{k#R_ASm1QSI559Rb^(QHbw|4G1Ou%_BrWHL7O0IRmpRRT{CElCIcKG$*WSj}~C5SdL}$79@q) z7{>>;v*xkw2221ohjAI_{l09@CRIBglACh7OV7&|vKF~L7#m1g8C*66eQ!S252rf+ z<}FKbeb3Eg(DV|WdOzf7Pk8bAN-D1*`r`&qLYYfW75~T3T6cV$lnizt=mhIck?a?o z)8o%Jd&r&nOP$8X`0U<=zvf!hmGXqJ?A$t{gWDiyj71*r&F;(2hOJ@uUpu8ZU7P+i zq0_1_nW}gwBNA{O6yQKz^%d`rdr%$A6K0{5ek$37oUDZ$;B8{mtB+mzsD$U(zMJ+J?kKxAD_ zMv%NTNV1Z!KIj3A3+v|l;pHwk>G7dZekV4O$xds&!@O9)+amKt`IW7qSCcgNe85_u z@2QjV&Dz7t!j#|7V^hef)cICF4ZTt`_}!JtC?)k;F)zc_X}j6n#;J46*U7qk?$#_1 zLbob=v@U*i7DE=Rj}NyL9i#r|rld$3tM(|k%#4m5KSXet^%yZQW;zpN92b+`JeDnb zZVjrIpAJ0o$V~ucDZgH&cOZ3%P1cGd?3RNn)mjptVW!~u_f9lO@<-8ds(uRl$!s3> z4*GO>QX(5buDhwB{qAze=g2(`Ixm3U=jt#mDvene%&PR3>1zJXLy`8lN0Ddk1||s! 
zbFos+Oj=Bx`{Bz}t;!WGg*({`+v*a>lNIN+F45zlshL0j7eE2x1F}3pf06Q$(L}zN zI?_l)pUGz)gB9H2;!v^-L+{=g*&=&wsKGQ)vyf zxa=gFiigYWR4rTVHwIKx5%am~KoYjf4imVyW+Ri%1gGyVaRYzE>sel?LgHd^M0sMjYNCawDB$qOL zhBB_O`UQ0B3QvcXv8aq!+7dOF2^(x@rmnylUw|z6W2ayI^33f&pb^p&a=oj+z3=6T zBjuO6{Iy(u$SqxNoh!bthLOGi87SIfERUrD@wmdEDDz(GP#aSHbY?-+(0(z2r4@D^ zq%KdDz_M3UVSJnHkkCfs*wpC^*dQW8czjH+{kiTX(RtB3HAsq?#nv$?B&X*VvBS$= zo%DP(iz{KCy*M&&^~5&v&=abgIB8=UsHKqXfEDX(sY)g zK?|I7-Cv|7GwDm=+zV+(QUM=32+nn-N{;UAn2S{o4A*~|z``s%0?={4yyceemv7Cr2E zu^K4cRptNS-ReI~7rie0ayob?-CM_gwZl-a!Jd3Ljd{i#pgA|0G}yo1;9U)Z5UCA; z`eh5TyA;uMBp(yP+~s5xv)E+y;jBionPwfARM1jSg75lOX{#9NYsjh$n^xW>diY$X zQuN8&vPQtYSbRN{w0}`)v|dP~XFBfsgjh**L26^-oo5 ziq#}l$`42@NwH!YM<`n^+eTcsPd)M7i4>B~y2Z_qSW?y{?|Pplk=0yE_ltX_nUQ`f zqn^oyuE`>oFW1@we7QpnrtmdP$lAyqk=Kb=i2JXXuW%51aYnOB-4*Ivcd}d3vi*cj zM~+rFW7y$nB;NMle+!FhUH-9dbte=)*Zc4QNhoP$+$8lC4b>6$zYoRen@v;bDk=_o z$)%&_b~Gq)GJF^NG1%Jae%)j%a;3afVd{h})e)OTOQB!+dZ?6U6Bg@i2XeT&s#BAE z@@cAR{K8J#W`FQa9_rgP^=1`OI*t0!c|8|JKguxh<^!4#hrad%P>~Y01CI&?hk4>g ztJ;WRIF&KEOtbuFIFrP1Giw2OQ&}hppWSMgW3ylR`>QVFaz_B<%<#$aOQ)S3f>0of5r-d{sV&r`$>L+s zWW(Th^40S24Mneqm=AhuWbgVwnr!MfA{FWzeV2=TRnj=Ic|7FH-fg(Ln5F7pQy(uc zl!}?Rh4KYcXV72A8~8QPcwKOQ!CSl>=fbb-{XPK*C_(Go+^A$^ddlLEm5L^E$UXh) z3;tL)=gXLWEAEA7?M=MW>gaeA{92gwn$aFuhwV z@Ym}i-z*$br*hdIQG06eZq3m`_4HP&x5jXAV5{)GoRGjlP(}JdluoAtRLFWXn1HN3 zb|?u4@B)Em1ZxQD6&5yUcSu(!6MZOPPUN)IVw)piyW*_g5RXOiaS?WvuiKEPfUKc3Dif0ysJKcTC<$m_$xd*?5f)09$#xVfaO8{u)V;J~>J>4-;u9ricK-8;j* zvPr5buWC9*v$&sHt{RV$y12Vbu;5N0xI=JvcMI;p zJwVXluEE{i-9vD9hv4qFSUar!a-UB-?Yz#$UsTnYHKmU}zKKOgoK!E^2|a(oubM=( z`6FN-DEbOj!ABud5qPyJwB56%fi@yt?=d=Qb=!reAQa^<38tp|B&uFbJ+DlP4~c6B_M@>PQCfXoF9zZ z;e5pQOxTqeHQ3F}YML z_3rM*1_Fj)q{)=$uaMA*6dJNp3LrFwR8&;_U9x7z#J>-p%gQ2#RhlAUnz32DYCwG! 
zs`&c)1Q~YKkbXYg0D{c#88zxfLuD8K@W*Tkq{ceiAJRG}ho_@rbVLD?2Zj%yN&de6 zDoXCrrXAX^(UsXGy&tVM0*V zstz{m$$`k1E?)2&9>T%;t~1Q)2p1@Q#`-ZRgh+qxa%kTF{(-T02Rk3y@eCwCId01cVGI*Iu*a- zcS3k$-1ysPir%y{O>%fJZjA(5!yx;32O1eGDGmO-Z8Wjl^|A?n<(DfIi(zNxB8hgGQRrwL8V-RuG^MRPh(eKhj@SyydtLtFCgHm4#ScgD1Ui#Q9ZUh%{lyd?GtMp z>W-2?r=V?7VxCPT7V)Q@puZRsBaTvy!7qM*xiiIiqZ^Xo`o17@f2Ss({XrBqrsG-} zTeXi)uX&Kpo#}Ta(ibcyHZ`>h1tli0OsSjw2^O)HJ%+^)j*7(u3K`JQms7{>(u>T)<+>KERi=d=wIcuOCk{Bfn4oyLDb$25xOSN1#oAeKxS*dg zIS#s?`M7SyZCK0eLANZVRTg!7yzpioyB9nb>i^F3xd73bhLy^)_KoE~(SR^JB{y4> zt9RDO7=YqmKE2#!RMv~c+dsI2LOdKvsv4Ke)|?&Lx}%KJ&-bI7=#0I;DX-Y+pIx;G zIR|y7j^5d!ibOx)84P3A?xJ-+s5Vg)(2!mqH^A3d>D;w5n_-rPNRp*3n&GJYrXI9d zXF(I(Q0Viklldph0E(jBKAlQ+E}HP%s1GRU&~?MtJGD*)G}<2)G-hfezQOdWvUm6Q z2VXroNC3_IXm0SOi`~|<#(a8T<~S#Snw&JNJz)4#`L{An(KzouWl12f^O^KZR+~DO z&HbNX7J`J6elq<)5IWpD3V4N^JT!dZ;r>KcBVPV@i-zil#7LE&QK>=p zrwh&YMSSSEKy#{6s49fU3Z|LeLB)U>o&BY#Z2qs5`Y@Q^3-fKi6;WfT$5IqxVkaQ= z48msy&sxL}bqvQC93g){N&r=E1eqs8JYE`-iTN-&GP& zCW4SNRaSW1iXhN()j$0RT;nQ)kRocjF|%xHdRA)k`0CHY(V~5J6pd=&azzx08f+=< zXf2GN4(+Bx{*<%Z#{c)R>gwn*TlWBb;ETHE>gkNq?pQi~$#jF6QMvm~TGOGyy*Rh< zW>*L_dV61jisL!3wurc9Fu>FQZR>c=tX)bxm_!nwyZZE)mOPoGrRL~$*3}tMBUKQ& zH~@M)po{BTi8oSpZr2?QpRgsQ`~{QAESzh7s>b+0v1D5~7;#R@yGj`=U%&L>UZYHa z(_y>trb5v0E=E5Tvc#&A3v}cgG~x&Sx~sY3KUA`n2yQo{2hEvhO}-2Gv{ZmeOKXQt zyRg#f>zeF3yKhl6>{kCibW@cfWIK}jDm>^92PW2u+aBby;m+a|cneLa$R->k={nEb zV|I5~`gTat(G*@W2HLQt3jWtzH4y_VuJ$Qi4K8mbWQACtaFuL1xxW$MrTA3Hv}1oL zVfZ=j{pZAYoWksDu=VM?!HMu|yClKR{i(CzEO z^^@Z#!qGr=2P#J3&%C?Prjx$!FOy23QBH9G6-wJ^3%2jGV7#Lz=>!Xm=Wyl6>q@>2 zWVx+Y(A93^dgK#(M#)t_3G+ac`Wakwi=8B$!mPqRzTh#ma2soqiA=~<+MYW-eH%)W zWEeoi$9~rSbXl*3Hdc*z+P5h5x-}l219;CKks^?Z}mP6Qs z>CZXayYw5pvsJiRBG12jNalb@o_Wx8t zUZbD9-I5!H>`uPT@qWH`INQID4BtVQ)OkfURJNSUHYX4x*=Ta+=8@wKx0KSyVuhmBh34^YKr>C zSWr)hfX9`7q`@Ve8jkKD5Qf`Rze2aFR+p2<`+P@Db!cPGe<$6;<#<{uwF>)-7%N{8 z<3tou& zU>mXZMFd}&*sL|{V#cA7(9Y~JK16``+eT=jh4D&%JAP8*d#vryt@~`%D=sE-@K~*s zSZ;A|M!it)7W?(L0XAZ-8=x92=MZZvrKSQHd6{J5#bLjAoXWvq3CAtY63jgNUMZ}M)SL9rH09iK 
z(uq|H2t)<6R8;(|FXgldX1R^YxsA@pD*s;�uxh{re6!>K44mHtk$Sy zLXaq<5u36lLx>BYCh;7o@gBQ9jGE&O{>l84|x>|J^rxqngKedE}GZ~PWd?k4D8MrSz^@Qp7aQggri#!p0mFt+u_w!Iq~8%P1) z`2VxFfqZ)?+kGN)IBlK_*o+mk4av{XujAW1ZUz@;Xm{Scs05aPC-Jk*qIOE#b^daT z$J}AcjzmCE1R0<(0Hj6Kgoz10QO-9@l%7up%5m%HI~;*g(l;RLHqzkno@P6z2 zm%7M+7Eq(FbAL0!xcyLPIaw{E{^j&O-^k}rlSeB?zho&7NH^U9YZ z{_WQj7$p2q$5Q<;Fdt+{dF}lsDAK5l@qF$RxU74F!xcN>dUt zOJ?x6{TffdFVF~UFacmX7xFUHV*o^P8NemJp*jJhYVB|an+_N5;}oOA3!v*Xe2YlI zDkkob&7k;U_76tk*GSfBp4FIpWV`?ghZ$3ZPRW(L$h81i_O}&W8=d}6%E%znv&dm; z6gYT(+v<0|cjz>$)?Yp4nn+7#fiNsoP3NlC4M8LAWZccU^%y#gr*)bxJy0^~Pg~o` z`N2(-qJQxMkMLv`22wBI6yj!^bc{Q=Hhi*^TDEmU= zD`~h~_mH}XzWB)ISQ0NYeRvMjL*!dR1#zm5Sqk-IRJWN#nCWQM22wqQJ!85O9wKT?1Vr zMg*yZ!*Mr!mAG7Ci{cG{%;~D%!D-D*L`1F9U+XaVp$G|ykb^jpQ77(bu0o=wL2Eb= zelI@fgw%XGwslJp1N~ z<(f>w8(UB-tz4?G$`upB~+pgpQdkK#)C}`M4&!?#G%Vk2@ zc08QP*hm%&G~@w!f4$pcy%j@CK)_DUYMb%kkLYTrJoBA#JxMA96ZveRS=BfIm^GZ= z!8Tm0;dq7AvNxU{TLFzYZC`J4`EJlPj1_qE4*j+rMond{p$mqnbhHNg;5Jazk2E#k z5~Y402SH_~=J8lJFJ+4ht>7qkpQ)c2KjC<8|MLaRw?2JcORh>~H|9ce@KL~RTKXS;A z+g7aS|Hn$k%J`!3^2uX#pY$v_Xg1Sh45R|Gh_jxECzSE2T z4zH^+A@csD6eLw>TzuZtwf`3+F@_@#nqvwUfQCHgf;m-wBn3GBPA)By$iP zjuzd-hH&0}Lkuz?ERedZe49nzgm14T^MMX`BVfINTI`*%)BZg$sL|=s8h>vx0rPK8 zj5aH0s5ji)PFIPUihG)L}Q%pD#{ONKYg07aa(2N+baO#ty){Z~!VsTO9ZuQ12&DbtOf!!F=rSFNI@r8;dBv z>USGU*aZ75lNNa}A43mTR%eDaiRv)LlNO~}N&d3#RvSX%cNjSTUP)Dp^JO{~wX{t` zCzm8!Y3V5Ok%I2fs69i2|~ybT^HTEO2#p1SXRz~v(ywHu=O z>UF`vVx$(Tcecma@g>Jn>d2Xxa$W+rxLp7g4JPm{X@~k26ck`kD=Ht(mKB^n0@c8l z@CSgCw{$V&&Y(y1a4nb1hwp-V@i(v!X8t8K$1(#6*J4yHCp= zZ$ZYT)uqo&xRtFF+_#2tpG{sTa+sbG`W!I^fMSw%Lv_%wGJ%+4M8&Eq3s1fyLuNb% zZp808rd5G2TLaOrk#ei8_hc%?%GLT^JkQyCM@Nqzp!xavskIw_$i{N1SOBHmr;g@y zr1F%9qWGJ%0^$*b&p80jLk5FE zTa!CGOm#v(l_QowbL_)4K1@bdzU;Fqp?70)q*OA$PlilAzHO^Si%ZmQf)?I>hfFHl zLT=To|1E;x*7pOtKVV_QM-|91;Lu2eS%Tm*Q`*JlXs4|1heZW>X5+emGXcw)<(2Y} z4D8o?g<*cp@-$|JVoDIn8&sGC@BJr3Yzaxp7=S}De6rFaLoEMW&%LPO1F!p+_UCV* zt3KO<1^{T|M=}(CAY7kRks_yBawX>ih$qPuh`?C>$Et0p{!xGzI9y~@uPUJ?Beb}m 
zM3##Do^y-e+k&>w^3_nVt_nH@cK9Eap`n7*7t% zz%;E;s&1cmWu+?3qN@OsIAt`JYN@M#)r(#h?_3-S*pcFW>60k}KxA+kij}KR`2HQDNWCg%`PU9}`AqRoSVJZg#FvIv4*`e5?k| zo2FNyDmNTM9%em!u%>6~%erc15ib1-0kQSFa>ZcwZ&PBz(~cDXx?wy)<4U8u>IM? zWTiPApY2c(FD(Dg#h-`S?tJtT1BGZab1fz&mU<9XU^1fWo)uVEX}?85GnV?$I{GC6QSBLo zmRy_J-X7A4zene@^mS2PCcQ{<>JN5M=w6r6jOWx7k47`Q6%KUm%c5cbAHfJ>7bh*Y zC0)(wrr}$n{9SqQU97cw)WYjwmavkI1^#O&7|K*8cJx4QUT&DC4`GfMN~dH!#p7yX z_d(ELt-Y<8sKxoH-1hIR)2#K=O%}%{sQyyDb+wyg5awL$8tR21vjE`Osb$54n_qER zE4lagBgwdmFPJOg&d*qO41cM5CGca^zHV4u{PVT>r57A0Zo@Kywwn5Lc!z!{b1$|@ z&!ABpYAH^Q>F1k+zoZ|KOlN=DD6p$RbO>?>KfOE(@$PROEYskC$fXUQIRh0wI{dI} z#ZMDUMnKa>Gs=z}T+9u;}0IBf>M>Ci22$J?v!SLAP)P4{njdb8EqgO=42?!yr62SdO2p>QCuKg0DZoD4;9i}w}ZKchSgwDJiaqfa9x&>)mrl$o|ilHi|e`9*yF?e^o)e^ z_J(OJOde{k*!#=9;St7;d^#f$X?W-iiw&ryMtdVsuS3&F%4#*;1KZgKtA$MWN}V=$ zll%f_GiTgK@NeET(Z9d;JkUX3etpm#0$&iRP^SJ6RsLgckBT zrMABttHzVgYowDeA`UKkt{3W$S%!Vmq!NR@`w`m!VrjlVo5zh6Z&tVFlh~?*hODV^m_zL|AG$EIAG?HxfYnzXbC*`nk`*(Dpjj79jnzEooc_hau*>$F_Ug< z+T(a2c)s65Gm&T$*1N=I4XH0>P-o(wvK`>~Eq4l|--Iac?w;a;OYauW+{!>4#tczr z(ieLJB7js|{X3DMb+aL0> z781mU6sA;I&--4ltgzD=G7-kVw+eBN9O4;)J;^|#n-4-Kpum2t$D;Pz7+x6}-t#Yt zHOoMP=iLb9;y5ye;pUs?+qDJbR6ehiNJKxa{$%|>Y#18q>A;~2#c|~@$1x5Glx%Dr zX$lM!B?9*XAobVCSgbzq+GR=wvJ;_jal-2B>nYMo><=;I)<*iXv?D&jcjLpqMQ|{K9`usgSivGH%|62!)?Da<(U&k=|mJ+6Ic?9KxMovm#>V z>2-dO62w2~Y1L05z{3--}9qD@EO`(#0ef_V1t z%ygcH5T|dXHsl9$Vw$R`$A^P1^Ubb5VP@L*H<@e){W7`^%;6Qs#!b#1P%V^KI3s&F zD0r<>HM7B{*OX2@ye#Q5TWPM_eX(2+c*JJVE~JO%e`3;E%SA^=A4|ozaW5Q?hQ)nc zhr3cO1rMuf{o$AN`2Zbr>7p@`eW9GreJcJM3>>x*7tAuOP5YPZpwnu6 z!G5^n)E8ssIY6mhY!}#!_INZDyB6IwH*p=nwP6_Ox~P_+TT%WSe1NBE565Iall`VA zFvJ73+(rFSpFA*dsrUyzU;)4frE35kfmyyk7;Za^AV!8TfvABft9DR;WH2ObEP>ou z_4X)Dk@%xl(E-Bpc{HgUg5CMf9~JUvC!d;s^F&Z-6aXx0tc9wzn9*#h@>ZISH5!O4 zrd$z9P6_k^ka?dm2n>&fe$~z98T4#|FjC4?z z3`R8tc`M^OhmA1RWg`iBf4T5pj@EGfxxO6dpE-fKqK=DH$XWdyPY7}6UqCCkMyHpp z%|sMA5;rQZ&O%^U4Q?GU1Czj4vKfejMAccT%7vCo#O3t;MW*d~HkFIx9>8GvtLNb% zZ3c!jqmV^BtTR;I(B285$cMYe2_SrhF%7naPvC5n*@*elI`$nFJz_UGGf#3!Q_~Ja 
z@ZmRDAX{V!i=Au9N|8P#za?1`_U%|_xkggs9|L9)zk{ezA(R`hwR;`rPpKs@ z>>bURqej8d`nKs4MH)bA<%$q)JCheK>G)QZJgt5JM&c@Wcj}5Q&cBykS7EsH@|DC) z$FqtxM1RLGMKR?-u_HSmO+IF!AN$7WKlSzZ9|}nK-sl&znxYtp{!n3>2L^Nxsdd|+ zYZ98_=V=ojH=`{7j@InvCX&cLlxYCO$slM~reY@?%(=GiTW_cu==?{l@@p-*{6xyy z3Y#gDK#{nbBwZXc6~6lSO2L32HVss5OH6fT6d!PSyxh=~Iw-Zbl~XNPKx&hLbJsd@ zS83+qjO9E8m$112$R{W*QkfMl=Q?KdD|o(a9s{;i$Dw6p7>J0Fkck?V(tDI!kiLQB zJSZF5v?X9oR+O2QT__NKS(RY2BI;F%wa#vih$D)pttFGPT(CkP>0ep4gCiF>>vd9Z zPoIXsqQLz~S?b3WRM zxt^PQ8{Rce=8g2T{diXobg5$m{u&^kVkz2N!bcq59YBNaO@FkFns59#%=Hz&=AhUW z2*?kGppvTvx6v5_dhVsX0zJ12Z$bp^OwWs|6wvdaWzS1`i-bNcHXD#9flB0oC@Ayj zf%GgJ+kD?9+VL6tyG{dKiK*jMBIibTcU>D6kE?5=;rWHKNK<+L?q>}rO`OpC^Dgft`mUZV=w@X>05do!RqCk#6 zZ~gDgIjeY>IvT30VH^#}m+8h&k}#HuY6kXFiL+8+`YD^RQ3cqFpEAv}C~9nWg23T=+a}qB-7)vlxRGYO zM9NcOQ1fVKkAJ$xnPZIW*7HuzF1f0$;Azt>+uQh2#DBthr(-Ato^M3@g9u9_^c60_JLx(p*(!3D7`Q$-b+V*5hR^T52hM3QV^UH~a+&h&cP?&oxS5W!!Ku-E$& zQmIoNcbohVD5mOGtF0P{<=`?b#nA%`l!xKKE(*Ml~Z%<~>0KUXi1# z1i$qrRIq?RBM!n8kLeWx0oRubMixMAvi`N!<~!dZT%d;mw@PMB`P}o$>t$FCU972`-GFV=C-$HiiaSN&YzrRqdtDnb+BNT zS+6nIE$wPz)a4H;Dd8@Pj9>?=4$00lYSEp7v~SeAlGTpP=ep*k)}{TD?GWvxY~8P{ z*_+g9;@`E5eSg14DYvhh{Mr0h#tEFM{O{SKWXjX_$H;nl;+5Bz z+q`d>ZRh=8(-Bo`Y}@ZesYzd%A92s|dxDXt9#`G=yxLNZ05Gn~8nWtX=Wu1UphRb# zugcHItK$NCQ5+0;{yPqIv*)u3F{0ti@s2E=ydS)r`EegdH6M0U!xsc2hkKkkzy1AM zMioi)S|#gm30g%dVPWS&E}sHOcDZxhf=_T5WZG*#7Ve%d*_ZpX>9y*mTf8^mRx1tK zH0AAmH3%>?o-`o$E}t9;oHVIga5%a?JpXtUsWR*<9lB;Stv2|R$z%|7k_!BAa4@<@ zN4FsvuBo~Q*iOZCAA*d`Kc4y&Zr$iVcbzWbWdRT#*eb%UpC@Nzb%H)jw zTP@k>uP-z{T!6vD+2dkEB^QYYGcpWpZlv?N_z)jp^1a25hSjG&<{NDG%9-^3FvvF< za$lYo_0o^eIN|7rDaWj2rsEOi)8kjrH8lLSs~>^rXJWM3J7l4GOY7e0JOCRbx(lN;cIZ7@2HX%Q|+f*l&%^4fm)%5hie{SG2`Yzmp6*DsDeC-c+^<1tX28gtnBhoh98uoPF%+78%*M#00IeJJx5tshtjoC0Cem zgVY6Y?L+}>0%T;M-b(kmo%qypWbuQbr3Pq_vB^TGWWDQS6$|17A$A;Esh$BHn=|YD zBwyav=^qZGxcX#I=BA(fw1!BZP(weC6@{V8)OHxV5xA=jefePC}lV|dvoq-THdT+v!Tihb+!iWqJf+cw)uyA6? 
z+Ag~+Of8nwb(hHlyGGvFfi<~JHOmcES&Aq?kZPGMJ*Uq0ZU@NUe~vUspneL0wWGnf zn?O=Rk%QTj*^TAq>3ZQNR8SsM=_xxA9y_@Ba9i--{_~Cq$daSfDkYY6X7h9! z$9L%eVm33;73)ixO|*^+saAl-#MFafM*e3Kfru0kr=sO}W-0i2AhL*J2QL-YLDqAv zq?{PRMfERND*)CdLR<)Lb*#{Z7^f!FM1!Iy{ab8DVX9izl}Bo@oo@6&kjZ-W39{~^ z)5|cN@6Ey8dwsh&u?Ot;FQcKnzXq2TGz~)J;lzfUjJv=b)MsYUyW-Sa;X$Wj8B6-? zWHUK(S3$SECphJXP4V7gK%lSaFcKzxoxXk6`hWp9#$h8Z!zTRpetkSi#GjS>EQ9EY zgjY2`&=8zFa4Hrt=II@Yw@!{SIUMSTnUH`GNVcdfeMVqN#fY0(^x#pcshFj1knE^N ztBSWbD);X{!C-@tVAgDwDrI@DGfvOhw*VvfwbTL>lIO}okpn;MS1@%{j0%&^$DiF2 zl~Bm4U#^WI=j}GT9m)ODWQmPU!FR4AI}vegr~9N%{^n^nT|CVDf(Ag?ZUibCwYKz# zhu!*w<6Y-_3zt-lFYM{KtmUjuoY&U$5T1&0(reaUaqyzMi-Fi0pG@Qwcpp_qqd&ha zT7qv+$-U;RPCr+9TUOeSPEf8QL?jLb7O#5=>PJOI70PA1o})c*4S;!K08p$m5&;u( z@rMW5x?Y~`(~j4A^N9dtJ~G>m=kj%OeOEoH5H#|cR736|eA{pZ+Yox2{Juy2vu~680UIjJkrfIqP`7DUq?v@P5F(f4&W53xz}L)B|eGf-i46z%TpSfO?*YQEeULPmi#V|$g(dc>~L z>=Yt{fBpvjo~$;ZwJu%*=GD=3ZZX6{Xy&|{lfiWE25+vQ)Q=1dvTU}ibW{0Ww0pnP zSz^0BP8Uw3E4F+iuvXD*bC0LfZWQRa#tDh4P^;i$w%??5*h_bL%PU?fv9C6J0bn!U zER`}%6*lWds;@0AF0rnfYuiJy+Zi4wDyn(S#YB8jzzm&aU2n7O{Yc<&REBtsZ#Gx< zD78DDS%8U18WAs;EfiR*GM3>7q!NJiKw@Y-jYJn1q{*Kwk_T-{nTOt#-h*$^A4TG5 zs{+?oSO$I)#R3(o`!0+cEbE%ZdK~~yQBNU#%JhEg=bFjUY}qVXa|Myxw!*cBDp0%& zbRjxb$s3FG-9GFV$zv|((HrZ8`iH*?)C zP4j>Gf0mF?nO3qkPxoAVX?rmJ$q1wlCM1JCrE5{zU7o%?62-lV+>?`&)98O~4-p3o zuJuFwdT9WLNWz>R5mhkKGbwO5#tit~AA=sqM;%y%5N z52(9?W9)N@(WGKhtF5@UgwE3pwueWnZ3P0bnt&vq#^pS$U_V)VKsuRc$zDbb2x@vj zEnYdNhEW8N1SyVh1rjXOffli<2x33Im7srf(KrQiB|Z;Bf(0T;ZUO8159A}Y$kIU3 zcEPyMOfjZnwh(0Bx7@>=kEWVbu=@jzjbMiVC#2OxVfF(C#brgcpSDXRp&Q$a{=?gcW6*e zRsXC^)Xy-S_M6`qQbI9l;rva{cMbH6AM33xQoc1;Gt?@xzpSayjqp{3aylR8*5h?f zBPDY>2Gv)7{%Tk9bib!qLx+F^$Scu(VXuE4y{-;7&_dIGd! 
zW&)RbQZ_c#yi96%tkF4%GfwX(2on)=Viwb&Vsx|EdY%mYPiJj5;EQao zXYy^^B2%(<$*DYF0HGgqH=6bwPf9a+zNqh{MUUh9eOh6v79(!1$T2 zD7VYqqSQyHt&%mqGAsff3E}MFcv>OoJ55^Pw3=U?yhbo|pX4#t9qcH8V|J7!hjo<{ z1i29Sbef%*Ym_T=bttHBk%rkJt~;;g+uUXFO-!y25!fLntyfYuVm@i3&qJ$$X3TC_ z*i)=dL%T$BFBEGF|KRKfK@I~BEbS)_Z;c&{#s>kx!j$+TsBEWErt(SBY`hrTsaS&_ z*Fl3;$!^A#)DGK_rbcCb_tUa&QBe^a97P-{RY*t(E{j!GuSiiNg{3yea0;O9E_>J- zt=!rj38H@CwFB>3Oh`*B#Wb*Ni>&A6oXgE6D@z+sfk~TSX_-7Gdkq=dmc6}1 zwB-WKglS^HsE=}0^r^6S02?i-k%ar3vAQyFn2Y{a;;XQR1cHlb38mIi!KOeOScYb% zXLZE48UYPXuZOs5nB2jG?J2eyAjK!u^0ky3l|7)qr?H{*6O|~QdIX?sTY8RNh2SJV zNov&mwqiyzAX>&z?b5emjMVJ_k%}ihujf-2U52H@NQy)nfxy5-Xs*--(%4#NW%Aj- zfXuK49$PEDl7d}?8)=iHSTXUTcq+hkP~&Zt|M7PTp+VEwSQM{zXE2)7a2$1xJWk{+ zs2VH}`4jV4DnEl9|B7mTiKSC>HHctUAoIR759b_^X&pwbGnLj%vDWK$*b>(t8yY(p ziTq0J+mO^M00y-c&TpYrIC#(aGC@WZtHaAs z{!KenOns^r<3u`X_O6gb#oGN?l z{5UzbCm87Hpu=DvR*Id7eQ~`!uD*)WVMcb1L1Qb&)2PJF>|+T&A!+-qd2{!f>tv+S zmWQ}HKQA?a+G|BZ%=NSF9RAP17~@;{?Ue7Tu!((iBy&s|v|; z))Pn>&VX}V;Q0%pcGbXq>6>DuW4p2J5`ZlRECmOLpDuKd3ObH@EYU4yis|J`QCtFW zGNVa@hctF3h0|al5DtqM*egv1sy@9iooSVQoO8Fy*dE9tLPyUr)6tmf354t$s=SJ$ zQPI>2(5LQp7x*#I^_T9^S^ICnu!lo=CD}(59F~q!e_Y8(GQRf8u~8Z^^TwZV0{{vz z0FZfw93s8lO$b)9(4Wi5^Vb6u|HLi8R^_5P305D ze-sb{rCZ6RugM-)Dn?{ouA|kjyw2%pTGAM{afwx6Bvo`6(}r9quE6nNuw zyr?6GFY18`u~M!BO<-r)Xo~X0W4De%AnebP3gq2&$7-AVMBTopmbjj8K5zjm z_I&$%I~9NNNw38T$>b8%SmeEoT`%1F#|2Sln_m>rw;>$71)(>UD_&h3p-f&mLM@9S9tUy*rnZKY6h_}rE?%E5 ze-3-8smd^(l&wU;bdN}nVz=YMyl$6 z^1>bdY+C8XbkZh0M_o$)qo?bEGiZARn-(?%*8P~kh=?D+?l~a&eK?=pNt+}9*3q*z ze!%haoIH6?_gl(Y+inDv2TXpk>2^}R{rxGSp|QqDF2Y5eJ}+tC%s8a&Oeo{f8hOL- zpe!{%IAR5Z-UFqd8F>bX7Z?c1BF%qS zz2TkJo5gM7;E)*RY1bv6ddB!F=VBd2@JeqDRqV>Nvqt|eHOz@dciXGYCA0E30Cy_% z6C^&K_>hIp5UPf2_H#C{xG|>i4LmsO{{1^$?~fpmw6Fs<#sacrz*dU#hGskj!hy=q zVQ2V&U8bk+3s5BiLj(+}-31kH`R_jF7zhr0DzcV{2#Wu~>Hg0?%=00|?A;4aA^}WC z;@|M`W&WANH{Nz7cUP?AasLW4gkpgEiu%jO_CM!jyzP;IhBM%3a_WEE^zUS#G_)iO z)>fb28<2s9jj1^vrS~1I1aN0U)l2hV-y8maZnd!VyKzI+#QVMTK>!VMdY97w_`$?v zu<{zPLpDY^?+u}#C>oxm$!h4`2-vWKa{NMl89rhOJL)#4c 
z_qcv|3-p|c71MhI_FJH5)_Anupl~WevXD_B^b2O&%G?bT(ZBZ^C4fX`N8c$xaX;g2NI(d8i^gOq4TWDq!3G(!2(!Vx!?qaN?qfr^zjn>_| zfs8X#&BQ(;^-KE#lsBbKr}89zkjT0x2sB=-44r&9>2}yQZvG-7BEDw|d$Be*BDG*R z0m{AS!<$A-8Otbn3Cg{_1N*phVG=#&p=3PAUj$mh!VRa}a@@MC%$n)>rzU(mY=iz2 z_k*uJE*HS_AQ`WS6MYvlg)wsUH)$o!0Twh3@Snv&|xEGS6j_1WC5Ojaz}G z`B2{SXX~I-=G#Zdt>EbR?YW(Toq*HGyPEkdx{~tTfZl znL{S3RS~BTuo_?8HXYRiu_yiR2C6i7?O?!o4+Ar zD9mPjvCbA~(T^7-NWIaH61pedNw+aE-a3JI5Yn(faBH?VXgUhwxW7EDaZj9Jh@0z> zv8PT)p&wz-oEoqxC!`%;)%(K58$--atv0_ZT;%ZgYNk4z+3R`Po>=7@p>%YCa;lB8 zYJvuB47p5I7BKhSB?=6m*m}}LvB_mtenN+Z$Pmcb*N(|GR57JK3L@3$xNMz1Zd9Pf z<%rauCpnh-iap_HWL|AC!s(W4Ab7u@$BdS`cPQ7U8rXU&#%K~MdcZR=Y(0}DT8iv`u@o{E-=pRvgC zVHBeA;N)P#qpwgZQAaTsDQ?eIe$^W%#Y)tSIzlfX!_4BHKOu1RkHnJbLLr_cKtRiz z;vzxAzEfW4Kl@K^_k&%;Ilx=5rbh3Ta8hnBkznR}g#Ehce|QqqBpTfLsd;w-^H z8?-k$|LSPlijd|hKhP(g>?5?uMoo7VKj~|2#++qszfg{m5ovppAf;yLZ8F`mJJ zImNiI;$EBNkv4QU|HpU?Miwqik7%m8j+=aV`riFo+>ymu(-HM?`I|qFm*I41#q+YH zO~w^=aG0fm1dn`8PCQFO?1vACsuqHCWB+Zu{t_4(8e86PIht=rWh@?vPF zN%&4-q*mGDu($8*@aqf7{aL-Kl>ty|nu;MHuA>{^Z6}{K#668qDDlrhwtLh6Oum9k zyM=43T;p+O<$#~hnR;RJ064f(cvZJPuhfYOEH!81S*^{OUvCx(*wPSKb*AP?qOFYa z`cSjQio$n}$1{RZCf=PxUr7i#XP))GRgl6dWQ03NgW!YgP1;Vz>g+V>h+8VTse87$a)n)Wt{5HTh9}-BbHC*GgX-)nB&V-gfO4joLzG@QTbFqwWkT zKqu19<4obvwk#!QCxz@93z~4iQ#G;Q)URm}$4I>$YNy=&zsIg4P}J2~ z31z}DQTcHThjuq!p}T8Eu|e@7iE~H50;S8~Vr&9Rvlrv0Of z`s~-EA2r9OLMyTka+YjQsNwy5%kLfXUeDCi8%d-aNGT@UeZ%tEC0mO()#u7psp#kz zkE1DKTvUc-tefu>&){d?>btVZfIL~<6)xSsmc*DSdqJCyk_e(v+P-P_gCvSWk9~Ntk6Hl7qYMhp9Osp}z}9 z;sLBWp) z!o&D!(1#W<^-WPz?d1r_B4>9|N$j@ZAgj7HKGKGa{trO6qwpXcpy!BoWp_Rd8VFGf zMcXdjz00^H7!SI#oSx@%skD4c+WyBo`)jQ7&go8zG2L}IGT)o$0o_SdF?Vcz`GVZ& zcLa$e`3CH~WuN@b0~5RP+Rv7)I!kK;&l6Ze5^Tak-uUVcgR?dxB9%ncd4azkaasWo z4!WLn(o5E(5MU<4*cio^O*aGNn{NR0WO~ZkAd&xXfqDRb`QOI?I;*n%v=I?cFVmBc z9Z|I-Ih(m0dtrWzyn6ECCzoEtgsID^IeL=fA#MiAhBP!P?`C!70LnV`L!}qGuS6yx zBF&0bu`{I{7(Sn=wl;BdFEbiBN?oeCx~b&I`^?p{@}Zey(;I3zk*mUKUZd)DgDR@> zta9P}V+|=}lp?iP40$k;cts1hoodQh>^o8<)czE>9%EhH==Zp&8v0a9hRecBGU81p 
z^|V*W+{j`|SmW3iApbZgtMY~m;abwqv7Bt3cHM}8wa1uG!YOge;7FK%ga%IIzmM%>*jP8wn)iH2=6 v;6V@bB6p+pH7y`S9K^1FrT;gC3<;ezHA-7`s@0rc009@lJJ72EAIbg)+b%az literal 0 HcmV?d00001 diff --git a/docs/project/new_branch_part_2.png b/docs/project/new_branch_part_2.png new file mode 100644 index 0000000000000000000000000000000000000000..f94c2e3227b71c72705605ffe1455815a2434978 GIT binary patch literal 144655 zcmeFZWmFtZ)CNe91WOg1aCdhYY=<}UeP{RI z{@6Wx&gS%lp}Ol<)$Qu8`#kq4LS&?b-@;?VLqI^h6%`SXgMfgfhJb((f_)7x(bm5w zf`E7>V#3caBg)SYkg>HgGBG!VfS`!c*U>={rJ)$m)z#4%7^S9!w{?;W4i1;o@#^aw z19W!|>I?u9lQlF}F_BlH6uKdDWxE>85W@kWYPx~ag(Ub|q=W%>n?6fL^IslC*z#QP zt74+GQm-eyygzX@Wfl7S_%Of>DA55aA*wjM&0~nNa=j1P0FLbbZLfNyd@G~8r~RSX zpypV;&3!h*1+uWhf!^lnBqV3{BrH=}$g2)ES7)-Y%#5tH_91{(tyn%$Q$e_3$1z|b z&5*=6g{(Dnpj@%9u~v0Pq;HH*%zG#(iV?zMo;N<SZp%ASS59TwesP8^v5UrRxUDZ&fn_$-}8LEgHNlHRcfxp8-yb3abfChhi z1^!@zKM)X5v4Ie8z&}yIAAu~$zh@z-v!MQdhY@SPFn*bb~yo|Kh?p%aS?s7x3^{o z0-ce3~X#{Kzc?XBO@*N3R*iCOM6{sT1z|P-$wr05iqpVw>7c0H?gt= zyx7&%vvRQKA|iTe=-N+Vzy17QDcI56@SMPZH;o(qmKY)&0)iJpRDe&x`PE@MOqzls zPM`L#d>r&dHP@u$<73O{*Cqla0tyPve7x+?0u$0PGN1W)vG?BX9UrT-U!-jCc< zr28z*y~^CMGPX=}TlcC+GhV*o+7UXF+`ExW7V(Dub9C~<30(bftWqW6h5U15!O4`K z@&R6<{60>KO-2}?;GQt=rJ+>1C zA>qZ7hIuC3-yF}4oJ9J$oNvXwj`%t-+{wH5yhnaqIH`-5dHwvkVdo}aeDgw@+cBAy zQ3{<-PN85lgV^bKF=oh6e0ZZj>X0Q@@PH+jOacJ)om-C?>&GDXY>N;L_1 zG&&+m`Fy$YVZynlM}oOVU5{XK4HNlbgeLx%yM!+V5DbmC-J28)DzhhxDuX->Ma#@< zpR9FpJ3BjLt`N^8Brk>ah50azGS1^t~W_$L;76zc6Q0N z8I>Vx7R#2@<0rQ`?UC$^Wx&~tX>xdanm`}HDlBsGmGbtqx!=i^YqW3DLDNm!-5hJW zT<*p!w%TZBaJdy%lqr5Iul+JDlu`U>6oH+*g+`;GB9=f|QYKIyil!oFG=8szLMmmR zsGO%_V{+l?rdoT7fyMI0o6`eZp@Z6j5nhK6e)DD}ap=&>c+T7+ zmBk|8XmozE)kWUr_B4Bx^ZMO98Z{c#^wj6A!MOfvn@7_tq$lPxqyaLQbF@TG7lb11 zNVlzlY>(@qt6v)odc6}CZs)f?qb!LfG7}%DI%rjCaGtctO@8LbDOt?Jz0Rjw;*Rzt zxT$wJ@;X|*q>S6TKiMAiyT9S8KBx>#pq5q0;B+DK8Nv>Tr;yb<8&6ZsDAr$#k-NVe zyqvSElku6hSOY%Csk2&%g~aCP^JagRYw(1!h`?d8Z7d(dvw)BCtU`Qw?5qPIdit$8 zXeacbXH^l<`3reR8mSayG9;@|6nkwAhT_q7wmRE2t38(pT<=$XmQZdPW!8HmDxR$? 
zH3Ify=Yi>ITy}Di_-uguVWq@hRU1QtsKSN^+YW>hCm|@LvIW<;u@gKcoxV0(=Yoc0 z+O$rVE0bF|1Wze7X1eiIir7sq7tM5LH)CaeCXa82r6bes;=NC29n#o5KG$06NqtB59N8M(mdbWJmtKO{_5 zy7=B=z705@0AqBjtUdIGi#zrMrJTq@;^@^~Z2qvw844+>`t_mhI&`B&Lc(xTiibn} zs8G9KF%Y!_pIWurC=S2P9HA+WJaTwIm*3&>)*CI?UoV|horTTjq0zN(W@x@cDRqaI z^(%{6c1B6H?cKTI-TsC2RZ%gsshEW!DIciO7-@8X2f*vZ)%$-&mpNHk-Eo0LpnI;9# z%Nw!N56=Dm$b5-iMLHU>OOH|^T%X+!fq2H!rvXl8YNhjM9lgBwKReHns>8?euxHBB_ z0P^$pDJ04nzKmw`pPF2m2NGmzRaA@Q+A`)~V6!!v5Jcu{u?){DRt`4W+bQ!U?~xSK zt47^btG8nzNNw9QSx3utdK z3R=}FlqsslFGQ?bPa-Q*C{ohgxw}frymCq`RfVB@HMP{PZ_H;7B^R02WN&R6Mi%iA z2{w6deU(LcL$_KHuo$&`8TM}0zMDuZ-kgwJ;&5kbX{7_meuT1o7w1byKsF-B(R z9I3{=*N9kD!EwBcujMY3%JQ97Jo^koy|f+=c&wfv83o>+ij39I4Pap2%4sJ7_#iba zma9O)@0oU+I@e?@2GWHabPosKGg?Xg_-^TPXF=NcgMc$BHxOw$5!bqlA39sM-X?S>9-3;JGT9wzhW4M~lE)63Jqu4eqV6 zbY*Pndg~6=Z;LMHyZm7obRPpkV|L=SxJYec0khFIWj0pP{c|i>TW6^+iyvpYcbyKO zn^|EcyznuhG04D)ePo}<>!(i|GW1E^E`_iHG?no&4m1x9D6}2kT}qtC|B5>4herTDVafEG*aWvAw!|)r`VP$Uq|B2 z(GuZGH~~?~lS*3hM(aJIBjQSQ_nKD5Bb&g%xNkoe zDn9_YoKJb5loBC_q0Rayd&BbV?j48LYeh4=d6N}Y(WA*EG718QLN=bo`W-FuJW>;F z?`LR9Zzm6YG>l!Zd{jSib8}CY>AR)mc~%jxsVu#ItGxva>ZZY~eLIuj>EUYXW1xb+ zx?ZeU7dw*7u4-)gt1(1Deg=N{gHkH(h2DMVlgGJME@+#8MqC6L%NmQ&a6IN`t!Fqc zyMP!PCB@_Y)%1?5=i=-KR8VJ0ltn758Q&Y97;!ipSRUB-#9KfW5v%RNRB5m08ix`| zT?T>PNQv~zh%qQunY-BDUh8p8!!2qVnJ1Mh4feJbr4|SqT+7_WVyy#c0t+&S=NRxy z-8@4ohB1G0ELEA&cr>S%h6ZS}4&6jBDpA3bW)gSByh#2MJ4us4bV1rc#-QMThViH~ zQSA!Rt6u~m^<#`=mg~LpHJ;SDZ|LRO_t{$ysEJh!!PbbA zYqE8uJgYYRsrDH3>J43*!Jyo(c0(bxV5q|FBpn{2K=M(w`QCG_$>aR=Fi4~J%S7EC zcXlY7HwWC_z}ll}qTCPQ?9XqJ;wYI0;Q8R{F*YNtLI=>WTYGRRQp#@q_+?Q=9fJPX z&Pam#bDi0WEcfoB?Dfe4wR(g3j2>iStRAWOPzBGkbZ8Ph3bnOy=cZK(U6JpdvtVe`?;=4~s;^Zd!7=U`Pn`xhcEaa6Ee<4@t$n6W+&Bryj194Ox;>{&j+G zjHD1E9!V;z9s=Q)rJgX)!>U$XokI>M_U(VQ<3mE zziczH047(LKf}vFacWSC$dgMd+;A7ZwYJHAOMwH`+VR!G;j9}5uNHAw!P0P#?P4e) z2dG4V9MvC*KT+cV5GmJe%Q<#~Y%0wei!0f^fK<}!MfU5v>SNUt!&ilNSd#H+n1MUO z=F&@Umxcud4kN<3JD$uJHAF<+ng-$u5TVdm2$@c8a^Y#19d5LY3lmVSwN0K!FFO`U 
zX49?z7bM?Cy|1KJ>Rd|<#ZYZ__QCG-O#C?XHm-tKmif!hY)+0q5&ZdVG2j65Ta&zxthqrc7v@_|5x+@2DEDC;X86%be(Y(QFWJhoc$d(4iz#S!e0e2#JME!k{&1 zyo$TN$;K7TYt~3euF&3^{`go*m$;R|?jmQ`#eg^_bS@`fNGK9s+potffcxgjoV3Rq z6Ap@|Cp5`vccIDY#*#P{x0Hy|^D?KK0U@s$^X^kQGfkmkqqvg7R;bUli z87fnO6hjTBLnnydY}x&yvFa6iC9mo?+Ef;p5Hx0w`@SnP(B(MKW1;aE!?9SwFk%(@ z==#=bjXgXJp2Mp-WYWZ5J{rDL647UULwz>nDjI{MVqb=c!c*zDxUs)c1< z?2HW>d)y}zz3tWY7WTa|IQ0fz)^ELOoENJc>12eaPI_-0u3sdVpXmA)XV0$jox}RL zrb!39uMo0DS@V^_bt%^TBQyg1QzhBE9(Wqn`za8u#0%OwEl2g1;qxvt42LHxPr~b ziSL(oi)+*2uXfQ#_I9eGG7XlU@pK_{yu7fz36b_edC{cf$2|GjsJevhp)G`cf@fn8 zuEu9;(KRGU@|NOR;U-`6{N0LDD(Ot_NlP+y7i$`izVP|j$(x#ShwwMA9aQj!5~(YI zT{L(jA3o35xFvnfwiR9P3;SmDXg}LeFVfWfG*U$@EYTo4I`1%LUi@nav`HKHZ8RRH zSB|Jjz(f#%DbSkpP zY`K;qQ%wC}hGt8i`5in6=y>=^Xa;%3x~sG9HOrQwR?mgbS{-dF>%O~U@$`=sUASHh zAD}(=X+*A(U7Lwo`9rnH?)a@0St>ey@UI&V$Elrn$14QgvH3`9?*2}xG@;vMFF>j> z!kaeRcj*0hUz5?w%38%??^da56IDKm-tu(yDVnex_G{=fa?HG3K?K_s9s1?^trO%c zzE$w6TdjDeu)-H^W6v%*p~~w-stz!N%3J|z=hmvSr6q-hoEix)&0gdKzXzjHpYdho zQ)>$MGDhit@UWVEejpADqZPHU%@^F1XF2O6`+W~#1HRg-{ucy9XvRm+#(+_ zI;J_iB80H(-YjBIc6^fNIe0Z`OEw&}GCoTqL$|2uRY=*9+#YJuA~Ku=)9=0uCFQ=F zbfbbcK6!EhyhJ4K(c!(+6+&rzZnv*O&G%0?%>By!)=Xexoz!4=U}7@cP8sh(v+V&H6L0;YuDKc6N zk#uo+EM4vlp^3?m&+>f_wqnrJ2tU^m-G4TbfvB<>0dcf07-(|eh=PN6Kn~n$P+py2 zZ?)90>Svy(+Rm*yC37_;nOBe+%Fj&-(ontJ{_wBgsPJr$r|0;8hr?w-^)+R%BQLP6 z!(^a_?QU~p9e_W(h_bt6uJqoNXL6Iu_(XPpZO&TJ|61Tj6wioEM{UEND{4t=a^1zr zgrBMFh00G0ADtz%j^~`DXX&prP1WvssL;RYWJ~td8ioryRS=K&HtzcfQ&%u0T1o(F zBj!HzjQ;SY5F0`f@izKG0gaZu)rQ;Li!_N0MB8xWES{h>H!N<0=&wYc5@GSqrtnM} zGRbEb&4y~q2x}J;ZDd;s*Od&n zNkIs}s)-*<=P1Fj{a(AnD!DRxr&Kn0Q4~4OP|&X3WUJcp9*o0{ny(J0QQuosD`P~( zfjKH(0p!llR+=r2j+jZ1UA~+ZH53{jR^mN@-%URmUFpCx`3Kd7<)(a{!OcQ<<#;;L ztt%KZ;1cON^20<@hui9$)YFVCYqh3bQl7yguJ2aDkAK6bPh)IJ9hC&XE15W?-Q=Y; zuw^G~VOZsY2uYiw<^<)}*yaRDSiTw6F|((=j|tr$g=bMiqx?><0HKsNSrat|&VxTZ zn7IADD$mesp4wNCvrBMkgfrbW&y~MOV`>rq_X-sOWG^=stepy76ja_)I?k!#{Q>dCOY@gcZq@hmZP0P?hhp z@CyUl7g-IbWK$Py=$c*8_;|NzT+y^jN4o_6@{7m}(90^N0unVURR1_bIOzA495}(= 
z|A;@4PE=Sh@z|}YgY&QK5G6~cDlCxvUje5Nm>ncvgsJ!W%MNzZf$7MWFV4RI3jTjH z|6K9^`{s`;{_oD4(@-2>m$Kt{vo(>+`DQFhNEC@gvdnsa6fg>(E55Un7emaN%dFc! zB+vN_#dd#6v<2fH1PV<`YT$G^@3G8i*b_K(=8ol*eCGnX_2Klo{itF}7F*zA@)NF}nDD3~+K#NHakzA^QV#NjDQ`_!nUM7s-=#t$M``HVI%e%*bw zxmvHi)wN`{o>Y_-Uw$8gyYigG>0%F@`~|aFM$}M|o=~!bw8zpDjHTRg&^zw8IZ$pG zGef5&@!wOAddo9zs31T~Oa`Wna;?dgQcY$!BY>wSBuaSe2_P%un^cRyQPj0%qcr5g{0K$#hg2NI0zb7gB9={NHI?vyTc z_vl-Nc6XQ;(gv5!R)zI?doBe z&9qR>smMIVY*xo&n~UrW4X$~!X9l+mz&O%87KPbuXf8Lk4h?!#JcYYrlX6<4db@(K zl6$e$R!*xO(YD`^T16e@t{@3oR8BlQ6t9O|kWBOVr*dLSXvCY{gAIxj$sxqM&B-Dz z6re18`pxB9gwe@QLrTKC;d+#34YaMbNA*}rW3~`1^6bTSPbHLlqv-Agju6AATGh}J zPPEO*JuZ-Y*ZI-rgrB-x>wFN3a7a1z&b#6rz}HQhdEWCRGvbsBCA5=WhJ=H?!txsX z^!8YG`Bx#CpAsx*v8J_yzcF5c2GenNw$A((*tHj-+hgUMD$Wy43qZT%H<-5{yl!Jq{X2nyq{BgL(y2J zlc*HcTrYPi8?07l94_~MD$C`Gm}K+|^+%@XyGCH!Uhc}GMV)MTbZ%p$(N#Jn+T*O%7;KjT#DP6v%xps~zwt`up z8nEGEVoC0_iep*i@tXd6s*+L7CYP)DG_GpZjH2Y^PgFvEJWiy!u!Cw@1qz;4!6?G4 zo8{QMB@NiQ8|twnOKWI>@NAz}IlGV#N}*H|Kk8wz45# z^%Psgk5MGAm@%XdVJGOm51~eXtL}+R64(jBW3BqQ%dYM&Z;X0LY%(-r`mFA-+@G^N zFIT>XK{d7&$ms=A*|$$7Dq`H6-MRbyz^pbwcheg6Hiv;3H@Ole3pU5YLQYTI53tze zQn)GXU}Vhc^4T~jAx|c|d$P?xfd8gU&p7-g_IJH!}LrbmBl=uZsB4CV(AO5N$ z)oy<{A0#JGSNJ-FfZ{!sl|$i_&g)7hQI%XmW{g zz!?oAHX%-|R_V;KzH*Q59g=0DV!;L+c;Xqsr{=aKiibsEh=29YF@;#k0Y;#a$2`y% zRMpqxE`myG2^ytT<@uB*fWk+dv$hDx)VNko(cvu@nEt z>w~%7P~OJS1Z;4&VKA2T>M(VxNS^v;R28iPgZrMjPY)>M}^JIzgmUv13C3B-ErKSmKE-*p!)Z9fUtrMFpY+GXPWUeGc` z0UEvVdcrIg-8#2&V;Nk;4VFvr6J`fp-3}zqLJDXTXd9 z`lElGF^uG5*huQ;t<5^KV|fWXvR^#AKHr6DPdI?PMRsTr2*I7MbNmxy*ATUd#Ev7`XK^>iYZ{1MqaMYO2VO@bIjsDsew;J7|1 zn`oQs+s&rl_=PsNoBcz{)Z#fJIFv8c;^29aZ#=#%#DIU-W;6GR`L=(W0TUKB0*k{3 zf6=TVlScTaRDnrvj52Hp70*ttybHrrt){|pb`!WDByH1om~tv&jdQeA$veafk~>c@ zDKb8qGUwnMg>5!LkD3#9idQdmt~;ihA&rxl$#Bq0f| zTqIpah`Cx$r=|jYYRFjou-;RW&d-rpj3s7qX-)=>NNkl2e^RoyEN*VN!|=x$!Mu&l zJ{m`?>-?{g@U#3yoJ@XXWjhiOZX1BtCc;p)2FnPAB00tFu{)L?n}c*X)b>QJHqgLS z5!2;7b!GSAJQgme%gr$!R-WFMX2(Zf)9Lau4J;pa2d~zqr=$Ze}3^ zH&<_I2Kio$q|<^{I32vtWf|gZz9zg`He%s-1Mp?SPykfHU|2~B4`kS 
zB}yoCMlU&m{|;+bFp1!VYV7fQX#)RG_dZL-1`dPig;M(OXiNn&4=o`z+<&S7UL}EV zU!#9m$_v-`-?8{2tXk?(CHPBnc#DJYokn->$Un2kFXAZ6_0p(6RYW>T!JVU9(=+;K z)(I8dGONwZKXukB3b=Dg)(lMl%(j3PT8*in4*n^utV6(^6K_mT`Db>83q>YMJiR);9A`T3;iofFdU|>VCgQ2;STYN>6c)EgbgG-AR<_9%i75^$XJv7^UWzzcJ_{>7Jf&)Mcxq+>HENSWvb-EU<;UZ9X# zj}E^N6e=Q?OU;xu#u7;$Za0nb<6mTo7!C6U2IXS^h+j&z8gGR{bxhQQML8lq zp9AWccRSlX&L3OZUa+snQ>qCQiPf^Z>uHPU(O2B(n`8-0Rv~a&?Mq@DsTs@OL`lOk z0f%WGE{CgtmGMBbDEIMA(Q?N5DD+$lc_9I*cv8$dshC*(;oMeF2x_&*f|6>JdrA>! zi~DKz=1{u&JN!=ouui9>LOPi~8<7C7mdkKbV6Mfj0rT!jcJ|rrFuC^2l2E9e{cwX%M0!KgjKJ4$Y_A}DF0!AktN)#WA2MymY;+5A>Gp|aI^ z&DZ^sZx-2N{oQ|$nYSC9K&TXT z>a77n+OHpD!5}XAGYmPC@gNl#^s#&$u=@R0Ln21jt?^m!2_eQ}GMrrM1u7rdA0J{F zCM1_U>uB;}^OzK)L)~tz)C)50E#I90<-5&+LGS z6tcR587E5ZYh1Sc+5XI3Gw{SD$`Abz|MUpjf6O4bNvPeM+&hAc`?gn~{^_a0-h5VQ zzQF{H@t0!l_kNn``bktG}X&0i&1JF4Q@ z`qj^tspis(VQNG;KGqvlO%=sSLMY4AQd zjkP!iFm~){VGuldMw+XshUdr(30*66xoUu!S4YImXxe1Iy*N&Stjj=sw%(gmDC<{? zyvU}TA73N~_Y~cp&Zaqz-p%s5W@hL}p0_FB)NGEiD+1C9auHRrWHx@v(=(RJVOM(~ z^1LhG&lafJN`b8S^eUxJ971mhD5*ABR*D4+!7~3^9Bz?ZMB9Bvy2;pf*9Y$&S#vy- zr@z3~gDVk-)mK9q1_Z!*=H`1Kza(9!e~p3A%0{=%oi1g`llbl zBc*EPi~8kcu%+B$(DMkUfik7q)C0wk4R051MBUx(nURec^m=~WkDb!fP$3KRjAVWr z#sq6br%E-IEjp$Q1r>N~c={voDS_JU@f;0SDT!p2e(J~Dqv~YMZd8`=N*w>oCwg1w zTjKLn>l<-XxVw0Somp0Sk5uSABKJ2()2Y^}dDr6lxHBu?q%!Dl7(VfpVp?Ke_|IF% zBUJ%JDCG(i=nCtW!MK7*wL*N5{a=KGE4J@t)W>#5BU{1FT6zvrW2-hOVt>FNjT zPJZ+nj-pS|^|Yyka-DX1o7FB%H#5jApi)sF%oIs!wupkoNOdhF3U^|^RF|pUICH3+ zRCp6b})>)4?x$}Lqey2tL1rm)(RDM6SS?xe4ZR7J10TIcFMnoPpd;D~sd;=om* zf>X}Z3u|{acawseizz?98G_UMpV1?zJW5`Rb ztZ(hM;7#UB1Kwr4Rp@w%c>-M^$&2v16Xr`DeWO+AQz-v9H1B7C*jb4S=#9Wo_SzsW z3QcfXy3`6&nk?rhr;RR<%pl>Yw?c(}zP{&79_847Dmtc_#Pi7$X z(+j>lsmh(t39xXwL3LpK9j>+FK@-s!!7M%N_u5LnsuAjwx)mB964^c|F@}AS6FrBn zwAp<_ClrptjUgX$@Kvah((K9#TpfTJ+vy(w4<;|?+Kp`AXu@Bh9qxm z*msHD?dEW%Uq`lBuB>@^EQPuOkcB>&dNJ!MH?i7tK(ebD;=XKLw2v@G(^0QG6cR`2Bo14oj_;Au{*_5Xx}eYuV4Gh zb>K4%oljaXEEg6u>hbmg+N}2&@sLyHnp7X+hv8<@;q?3WKYhrxK$cFXdiy?_SMq(J 
z_|{E+*ywc{yMtT>p?r>liE5Q=*}EP=o-)+ewv7Vem^dS3^L6?`DQ}PFt4(q)DtJT|XDg&4&-0Nkfgr!o1ln`sN!!`>g!e_nRRm z9hK#W^5}tD1fGC}dP}}UYia2zi3Fq;$4N6Uoig+hq7V$$46Wh3abYeAvv>1>2~ra* z!QjlbD{}#%J`2hW1HLIACk?^N@ z$opc(&H$xCyC6r`>-Qk2Y~Oa?o^S#VwInRnM}PIVHQv@I$U6#+BC-~H2-C9ozrjba zJ>>gliZq9lRrW5x$;b%8NUV=wFJt3Wb8GM~VO)3(>d4nFU}pepuC84l)$gnkkC|$RgEgzhfhRT%s~nBPwT306l*roW>;YHo>(;Dy78*!|^g&tM&)>PRySL48aw0 zK6S}bBY$(b&xVlKJ!1YXK2O6xj;`9|rihD{}RK!2go)V8qnvl@38m z9din;)8y2X1(2D+VihO(7^4{b8&G*K!2PNz71%WgG~Ari79mqFVOL-}i+>$4`f~wq z-uo;Zipd`}djF7-F&J=t;tz58hvSGMAbQ2}vWahCUxg-! z@Q;W7LuqM*U_H{a9kL*pqO<_w1tkZ!6qX)70kxD0;saF7a*X_RXXJ^brA z3v8ogsr~Mvpy^aGt!A@B(kGjZ`7fb%LF{1FFb`0T3J#U*BQThN$A@l#2hhp69T~xP zficiY`2DlOOK8Pr2dZ6cb0hCLJN;FqsO=m-%=l+6bjHCo3YJ0}j1i?gKy5D0F`;k? zAr`3=otrV8NoIV0U@}Gr0#9b?h+FAKHcx;>Kl9*Z7jptSM7}rIZ62mD5k>~}N0x!Q zb2SowXFhl#GbB1`i9N5>>OT5aa6e15r?BuB&z91Ld1+$$BG7yQLP8nxK97%V+?`_} zs~BvLWRQZ@D6!eZ!lFLD0atf1`u&3m)Ja=EqwbUMs$>hy*^0DUB5y&Up=*l;gY8<% z^W6zzCTT>{MAMmaupk#rekMhr+W1B)g~zIVvE4y@=j=k9IMd@U;o+no28C4Y!}N^W zO9I+_xt7mpJeh7hgDVcaq>HRb^CYQM=_8b~$s3O)DtbK;B0S&3O_i%_gX4NSsE$J* z22kUNA+`g$k^`A&tiU_GPg$Uk@?dG9Q9U(SecP<*aaxJ2>P;~ic9F#S*YDkn|9mw$v&iJ4k%fat&)=n6#>-XN% zDdh-7P$`y7|4>Ru=cpfEF`I>5S9bS^Gpo}5_`&b;Vh0$m`Iz2A9)g0_Yj}6kvF8CB zN-m9maFl9#KQjvO(bZ6>w0>{0(4Kp^)F#eoFhJ2{yAuNrb45G2+w9S64F`f{=KyOGfC z&iHE%FD?>+luf{Z>gLcQ(hfnH7RO&~!HSulqvhyc;)Ov#OEef7s41zT7@JzNW19+e_5FU*!?@dq1S+Yn`Rlg#qtqP$3OU;;D0oi#6)w>&K?bm9>r8 zoQ_M~p9cHSRaDKTGTHMRlnXuQ#NMWUoc?UMpJ(-=w|^1W?-f6F=6QY>0--bxygOW} zSU9&C$7dhc+magkO50*EkSvQky(5-sUKiC5eg?avAWEr#+MEf-Q77_+{sc;iA3~r3 zjaL^B4BMo&F%x4L6gr)Z%CwhEPfeM#B?&0ON8E!mqy7mMiULQA^-_BTI?ncP@Rb zUpVWK92_w4=#$2t>eT}yRTjfWl?*Aoc0Jzj)6ufXbR1n)fhsi$*S%q)a{!C|vbvdzLm zM|Bf!SWP#4o|bz=qgrFCjAR6-S^$1kd~LCbKiK=OHqkhjlQTf?2Kz+lp7@1Xacc6fpT%mPZGGF84h%7{Hk3Y>2WXmJL2=}YPQ zBO@n+rf+Fx!isuB@%kG{G}Tyl z8V3j`y*dF$2A6|M9M0i?7A&GrUjHGI?=Vv$ zl3TR$t-*J1FkorkC>GEDIHTTOjKG;Ct;@{JlS~>rG}~h|^sA>WMF38R6Xt!g(ZpbU zbA1wbd$xg4z@u-<@HCdmLGE&aCc{_Wbx;qjh<$CGC8vU{ABnPmd43+7V@ehs6Z=hs 
zvSw#A3A}S}kDtwMYo@&Lg`H?`bA#q{i_?@_t~ zrW3H3vl%~qI{k^Xa)2e`7Zib9`Y{a6DyUG=LAR^(Qb+kGSRf2-HekOG@-Z0BSlZq~ zXn&TPy_>^;%Cq@-lc%)g%4pfm8mlZh%x%Bx-M)RA?dMDRb5y-9k@N0c^j<1G-_kE@ zRekug#WcpUM|Ek`!k^V~6%Lw6qV$FQQU5@H-><}I^ajhUawLUWd>m6OaYR}ANDy%7 zZ=>%^7ZA0p2=drNQEk`;QYtow+h!snG8KGpe7BMW)Q*)pnjZo`KY4;BEaz9Oaile& zly^oCv(@%C))1vz%#ro|k=W${D&NYN^7wMeSZav88_a@kwtiLRPKv|Pl#<8w^b3Qo zK_b$IeS*0`NI%Ugz9KsFn9E<{n<$0%MPk_{28hM+ z+pp?W&9c4HIb#;4G#XJ6Llm4oqmMkF;|RLNr1^<-^D~vKG|sm%ZD$;>qR6|wSH6l` zC8<~GbMy%Uv*ZZuc3r}o{=;dE43BDM)|X)N7kM<11&fRO`1SM3bp<>-Dmbw_m_nSo zIi95axHa*{>3E))!|5pT@y)wEhwN@Dza3Ek5nNArssLR&7jBGVVcpoe*;^gWkp2*~ zgV(m1FH?CL9kPO2soDT8`b=L)Pxq|S+>VhDA}rm zIMIpxBJfIknN8MJ=ITu%!tt0TdX4B3c#?qN&3Jy~65fyC+_Ol`XBY=KhJ@b7$NI8M zb}gZp5LEKXn^9I(xsB*M%^D$QEGFCFCW4v1Q$nabv3z)4!;9Ms@aA)`uwUTh4|ux; zuLg68O?RWLYZe$-wz$uDWtq=`q8^%^i=Mu-M6Th!{=C`CJeH?GDL+&@xMBDhC@Gg? zFGwlh{x!Z#MsvL{gjSv`!ma=|3OuEirA!tnzQCNFewhm5x)-~__ZZ2+W-cVZJKtux z(Kh}rUdG30c}Xu?N<&b0z#M~ONJY6kZQxg(^>DgmE+4+!;-w9bMory8tDao~&Mg*P zf_9;pYd-Fx(N@Sew3LU8B)_j{JVSBcDvzxGFCceLg6k1aR>ILKn>D9xwh!jnY=9=KUF6Ti)5YjC+lg+b zvqIx#n~2%lSA?3F=tOoRkMgQr^9RA{@vZfIaS$hW`@;ij-9xvFiA+c2N3V()9SCl_ zZH?eh+`&GH`&VRd4`$IC8Q&3b=d4s1eaKnJ0{v<>WEw1{q3oAVQyBilIf>MaRtlq3~r9w z)Ea9|rZw7AJ9v+m5t_J_j~CfUYfSIQJD?@u8Gfy})~&ljDsFcIdE{?gYB-gdtPW<& zX#1bI=v1d{G4ZB65MfVekI9KUETiKHCUW`{@14wB%F}3wM$Ey#!JcVgV*IGqi(X}Y z0bGqKya_{S4iQOH>oH)>MV6#04igG_k z8%ELf9!9G%#G}`9t%j*6@F@uh27!Pv7|J40x%#(2L>Z{nx&zkD7_}JpzSsJ6C-~&?W9j`xAj#H%~V%kX3}Qg}>Je8aZ~I#n?5Bthe9#^>_3!OsC3uy_@+i~9>@jsDZ1hXR&s(o`3xfYZfw6~YR}{}Mc?HcoWYqf`0)-sH zh&}d+6dG3M~bt=Q^cO4B8whliXsRc|Fs6Km0b@a`<_TI@gG8 z*VWZ*nJvGSEM+47@%G2_qwqn~K2v+JYS^UE7WtxDX^D6knop0U^s6P_qWi&&h3ego z>0PxKk74hf_~?hXuA`wgl01f2T}0W5l>8uj7UN-+?W*O#{34;Fz=B>#8ybSb`-PjI zz|tDmL+;+t{$*?ya9FmgNsg=uqfFE*dz@Yw8`Iy|C;RkGI&=TksPjf^ZfhmuNXCU` zFb%cR6Qw459cvWfnBMF$*Y=ZC-4bsT2F=gr4%XzBmZ|uIJW-FSid%#AGNXDDg<%c@ zx$<`-)(j}G_aUAo)Ji++s+>hnziJ?V`YsfxWj53}GsEitc@U*nqKeV}7IQF-jrZ|R zd<2u>iu6koH5}xG$o(}QvIKBEny}4Q7mxW}>WXxHRu2t;SXXGg&%q131rkGUYu{ni 
z!uA-{hd##q)-LvN$ zWAuifZ)gvb9zkC?DHFn!I4w(S`89HoCX0SJ-7*1!^rJRl>U->{iQ&Q3Mqd zaPJzQLlRH|>s`qUlMW0V|K01$U1xQVA{I_lkWAuzT#fiJ zhqxZ-WLG`83*5_wl}L4#Nm!{WU@(?4XH(co1OIK!Q{$D;Zz7JlFR`=Kzf_|-1*WpY zJ(qPLt=go%>u*^$YFJPM(LXL>-BD-0U65%IA_1)$IQZA6MG&V~=kC`~BdOmftke$W z6cHWge&)$y2E9_UQ^~*fZ{I<7>Bqt@lTOVzY;RFNPglvwzFnq=b?GT=z*4>VJxYeR zR2L;I;botj$Z$cpYSd7f>2NS9SH|u~*{VAC+q&i&LErob2CaIbvG7cZmO?1?@Tl75 z#m!IdhB97n;SqK5IJ)rFAkcs^coGsW8-me<#XJ_v0d$prITgdQ6fWBWrmKo#vjqND zh+FiPF-qjcMW-;6=OE+?Uw6^({!}TENGuvkMZUwcX73l@2nm5SRODY~>Q8f=WaEkZ zUiv(7(s+yCsEC`?>7mcKzpVQ{JUpC(GR|Gk!Iqp?qBVwVUy~l~T!L@t60LVe<5meA z<1k8=4(cODG-?xDxNOg3Lh4{c%sV=;tG4GI4&pIYEp$=3?`c}xJP37)eAEzARZ6u% zfLTR}DHz3ndXb;X?(=*K$59OWB1ndh0vV#KF1dtMc6o>CKW&lm%w&D-uyuU?69~Et z#!l$rln@5)xRHy-Rq~Fo+qUgF1sv!HBtn6rAp-1nNCa*tiT+?ws(=Sbhy$D z8ug`muIZHVP@h?&+x?}fqs?ja9p4}z^FdSP^2ipqbaSiG4Glzvn`t4njT<~wnK_+3 zbV>v!6IFR$G3Yag}KxFFUIiiS*rY z$idQxxU?;0iAE;(<8dPRd6yVi6x(k+uPTF2%*^9uCC8slH~EeRM7obtOx-W7Hu=_5 zB(qW;zlM_A&CzTe!Og-OCTbO}xc|rp_|B1@a>jz@ibLX}fN1;YT*Dqquv$Rp zN3mR`*~T}t;n2&gc1;?G4t<`IRHN47C#}YKVUSa0vr^DdTv2yh=t(~x}0vJfiRxc3*UT6z&~uQbv&YV zjEbM)Qbjm?UYXTaE4_vmr(H888hqXQT+h#7Og6gXgCTU7yT74o_e<2OYNrd)?TARp zLlHnD%6Az|!s}qo2H9Fo*FTx_%&l$1+dm4cPJo%k_RQ(X`~aLXv6EGTp=JR2lg8^g zl2a#F!HTSL1LOB~x&DkwmQgIfHkL|Bgm)j?&2d;3TL*7{;FJP4_2j*1 znfrVYt7ne)3;l?++C}i;g8|zi-Xbh)KMjbwsWyIwYueK(5ZKy~U_ZO~&}NP|IUdm2 z?~J0Vezq!N7XlQQdM)f0?6$~Km`Z@AE^^P-+rDez$OIThIJ%34D&~1nU(IX@5g9}y zfFw5xuST6!6u@QyP~|x@4#njANSP|bOyyBz63PW~S+R=6s$%p$4)QICL$GxF;~CMK ztsZJAMHDjt0hxIJr&}n6U(729qDs4()pD`gNMi$|oqnIwabJA3*;VQNjlmV$(0ZK3 z2BwQYbhsj}nRuyY!%RpN`&O*m_xP6c2_sE^?{V|F(xO$TL@_A^ZcBDTq|)-<@T9^5 zTLA>>jr(TT3raavMf*;Heu| zjeyr9{bG|O$8F04pl;-t&6p(`Pv>jPX9$vAotHou;Tu34$C}L)7Ge58(OVUV5c};5dKR+v z!_5KYeCv^?(ZdTDBr$V4TPH2`Le-9J8A2o(m-DeK!u2Dq;b1I+tuaK5k8f~=B$f>@ z7c-tKn&@HPLXK4R-2n1ABNn$?OAYoaPZ(psZ%nGZHaa_ouy=;ibe{a46_INJI){p1 zW@`JTRW^`nDD=I6xH6i^K0j%DDHLe?UTv|Uo>r?;=BUBtbXe?FP!|=7Nx$zM|8Vsn z8O8?4s_~12xyg9D@?R)bMV 
zMw-QK;HKsC2#f$U|YZP${ft=vUTJcA4h=Jn*PQz-spX$jGk-YR}(_pzzj@(T%YC zIaewPP#iO@wArBF$J|o;Aa9QIAufU0A49a0FITJ4LE&NQ%Dm}lrifmV07Y*rhBZ^G zX?8m_SDN|&r5O3kZJ*m>b;0=<-%>TJ@knCZG~e<<%#VJ`B9*er1$tGn>ytJ5XmanY zq1%fwkoaks(rg&c`IHR8C8G+jZbFASO|DE@;gS!p;Ygwq0!mlsS2gWsmoos;kI#9s zKjS_4;mVHrH=o1qk4EcorKRhE063ik3FT7F3>dDV1KK*hGkzTglV7|*)MlrlFp^%$miEN%SD|uq~@yrC}iXJyB+(~Lg&xDn_a?J98n$5^u|1YduIdz#F zhs))S#9GTWWJe@u@8+s5P(V*bqw`6w>pEw!sj7Q#B(lDRT5Q(N-XLg5k2i81NuoDS z=X$PZ^SQj$2L$=D!06h&;)IgnjER!yA zh6e&3x9;~;{cUzRsgI)FNyv_j2#ez-Z=W5azY z^~jPLn`wlEfJTgMd(kdh5r%mi;Ix2c*qQP*7;E^ZZ~Z4_bQ`|M=~LUwlgZbaJHq#f z)DfCFDjU@tFKoyBFzs1B1^|!Lk+Laj=h%I-acK7JSO)-F$}; zJt0gm0a0PT{r%sd92H%{q!~;}l%bz(hX|Um5NS)8DZ0LN_>0Ut{=zAScD*p_XIH~9 z+`z@a2!@0uea!Ma+l2a5LIW2e-w6o^iWj4aJ@N~MYfIK4*YnMy)8gE#|4}*k^lgD} zDE7(9*`~G_^9o80-p^&7SG~;y!9O|+k{VN=onZy@I9@?KxWg|H(o1z^0yw1}!>jz< zda7xCi$IvsiipjWUk?hsXMxNiq7tQIPeE1C^DN1FYI|1CITq``&2MjylP|0`XVyY# zGfZfIM{TkwuE!&z#}j$$y%`&lP#M?x%S-iz@B2$$aT>P!RU365!E#d$lu5G7cT)A@ zbnl>5P-_Lji`KsxT<6mME*NKGB*B0X^kH!I)<yMx6sJC`9dBnD=e> zVFBLDy7G0>-%u)et;`8B-@M3L&6f)xK#?GWqok;9jrB)byI*w{Yn5t3J#Eq(d@}6z zCYAMr@QvmOI_dED4^+2Br`JuP8RBz255VU(9)Y)$A}Y&b^T(sXxMhZZo-XjG6|2X` zKs)UtkbDmIRjV?pdCS@3YCLrYNP)F_+|@h`)h=m zun9Gb{>TWN9inyPazyLO`Jh8Rdk?$uy9U4NvDMzGUzQKT6=`3B*+K&Cn-VC5 zWq3TSqKCu)fMzx*k_be;-sy5}ITTe&ywLVG6!O{m7?i3U7C+q@;##gaM$`Dpyf@g* zX9ZzeHy0o85aPA`4b1@oHqJNbmhENs9tN{Ni%bEb!$vJwvu`GMr(KZ_FP&7zM~gL_ zwdD5OG*sh`{t0-V!sGU{#S0EeHmzR%(78p##jOcYffbC-$90@enRV(aWPqo6|4ef0 zw%#t_w$4VwGut!Y$T#cy;pkUGX@-$>hvzqAl&a2Rh2F4)OB%)xqP&;h{wW2&|20r^ z#o_m<)Wi3kTwmJ3`EWb{Flg1XEgxaXDPpE{u+2ZPPMcW%05xR>rL)I7`^84x6)ebk zqSmjg+|2@))ihd-s-f?%ViD0X&-j>gA?G^Kn7p5zCu^2q%%H`U?jA3^W_uQv(^yP1 zQ;(QU#!;S-NpcmXQ#jR{3@x;E9T|$lujhOVBF}_4U zV~DlIFLxMDm(RH6HD8Xph#qK+1vVSI7pL=__snNf0)48uxLV0Sn#rxnO`+y|U6lsx+s*Y6|~Sj}^58*!d~ z{Syl3AYNxYf$pTItyl0f5EW#@-N_OT#W2GV>DSD-(N*$wgm{T>leWv%jzJdi<2wlO z#hOKwEUN__f{-Wt%n#4X=@2YvRzAe;bP!S)ZNzTr#nqJ@v@2TIqY*A1l1=lNo+~Dl zXlejp{JI1r&lXGcHob(2M(rW{QY(X&cM=otXRof0WmM;|cT!Bb?jv`mY{+ru7VqC4 
zxOTEe0Q*EAHAef=Y8Cp#4aAwutv8h4kMFOUJsP-q_X9h?d}yehjkt=v>uq?8xoh`3 z-kdfHLT!CgC_|;!FM2NDuT^Qy>R3|dxekCo_w!{|=&=Nuf9N9ikV@25KVu)%cM^~> zEtSp4e6F$jUGXd;OS|cCyw+4Uy8x$MKgy}2>ghPmeo)!D=q#L{PUC(j$1l4uaV#6lM41y?6T+am4o{t=Icowy!LZIG?^?P#xxmch*+?f`9u? zEmbZ>3H)>*_12t@H)`>m9(|43j56)BqZZUvgwvpzZrKO?e-r`HZmr*)VzG~VKd_bG z{3mhD4}QJO-!Wy?AN&VZ1~6FwP391o)%R@w^vNqmiZC94$}u2V{iR8{ysAPYH&a@m z{Xv@XGXPLIU5sVoKft(CegK_@@1ihI`WK$23zT%w)c<8|t@{G#H15;faSUn z)NDfk)TSNcRZn_dN-z4g9{+s&fD*{6b&Nl6_JIL(Ue4N+IzRrc9wfkmf-~x%{qtrp zWKsaO=gS@_^|yMk0GA4F*iYl{oAJK@ussNC_@qBzMN$+=04f&`RrmMJ|NnK&sSXJW z0OD%W%KhIJZI*@Ghqw}(J?*q$)vSx=j^s{veR~4}KDWJ~Dyz7JoiR(*2Ku2+&UvK*w`z zcUArEvZ&V%`f2^9=wI^!@O?HwNe?4ID%l?^2>$vCkMw|0<@q~Y20%%~-6_qV0)A3p zw7&nwZQuWkvi$JMVjB)J{#!tt02mdL-k)aIvBGr@;5i>!1&?`^>yX*#7x&yi)4ok;ndLy8*~&D_C*rUlU{mJIR+TQ29Wz z`QIl6lb!;+fsVA6-%qy_5CFrl+BvoFZ*{*AcESQ2m(49aspgifrIIc#hSR{6XzoAg zK%fU-#|5-Amby;`uu}Ao@u{5#w@oE;NA?UcWE`KLzdkf@; zo??K*ccWd-_NQrJn84uF!-g&Vt;4beFcdA#YPf&;*(Vg}4o%43!@uKqOa?Swd$l?^k3Ve2i|;hZe;nl{&cUE zL&d84JDT-zz?x`~il@p#?l^dw| z$@~h}nV1{?^g80>a{}{0=4N*|F21naXY5zed^@s`K_CuA%D^C7M+rbM8_Q&782`iC zK|2K??;n*jsU!d`VKRr87eFf4NIEIAPUWdWUOQi4s;^J<1=~j-faKDWLxHx)23{7p z5N;_-$s=UA1F&#t_-$#AkKguag>dBr^3etgPJly&C?tdDNSoS*G@(}VHxoD zqCd2AUf#MEj0iAV-LbE`zI<@H85td3lKL3{kP!L3A6o-PQ$LjmS2VIDmXacfkx77Z zlN;=(au6_0b|Ubq9R|v%#sJ`!Tv_D9?w#|&Lg@aiHGhk{!_l%4Wf&C4B4E4=}!(~6JpDBW0ps9~h zq1}vGOq(uI4&FkYF?)&THF;n_!0tpks^1&cY8N>1WDdBa)f#_*1}Q1}96mf$J*>Ub zB!3ZGk}th-Pu0RzXMA7$sT%Z)KABJENC+2zR7*i2U}pRl5UBQE<0X^Dzu!sc#7Fn{ zxI0&%+&=ROoN6VwQan4G%NJ7s#2V3f^fq>XrLItN(@64tq!#3dg#{xbggivGNi6pR z+dKOuEB=EJOetS8kj-+LAHXYSGOY5s@OxLexw^Sc22f5F$VL(0dm)Zr;jm;!ST2p9 z8Hipzq7+-oNV_>e#*bK;sz-rAbkz{A3>3)tXTF1CjDN+nC8 zQz?Dq7&gl=kJL8%0R*3MYRp%DQpHjhc&LNBKo5;a>6lJvYv72*F{@bZPn|G>oO&wx z^0iBFE=T4W&2VwP-7tMsObj+cI@r7OUM-`aDO6TltF4t!*A+-)wug2+m`_dlCKDCa z7{C2%=R`DRsV+yZD`=t%2DyK1EbZO{M@^>N{}fta>-YC})3t{7`(244KYk=>t|79R z?Am~o?%>fu>ELZ?X6S=*dzpc|4^S6-;N}r#v(N3-9G;%Htw7ZbaDsqkTCMm*GIK*b;#)4I7uq}-Bpd*@L$;3hO5++`R`Gm#l 
zt&+4`GXL7sKV3ytOmlaIS*Xo?^GWwGK#aw5`@^J%(7Hotd6E?K zdsKnF$(+2SrP@Nz1l4yw*ut}ZQ4Zs?_4V}>8K&LiMEj=R_zz?Jx*h39M@Lcs1U_r2 z?qpY(TXzF2F*Lsu9^ZvLf}p4xscznN=8Kl(MG})O_|bA}I0n5szc)bQdAl{7AVI5; zBQ14HG@Qcf|GZik_pD#iU6~^>)9P33D3}DOd1ck=CGO4VQ0r}vg_$bs`Qs=#`br8j z-1+NXaigv z?!)0f<0_ELWYR2D7JLBxqUBmkgpS7pRIvquke>`1lh)rEkZL)lGKJfm|Ax!y7-#Zr zKmWLq2Jvw9TjU^wu2^5p|}dOhSv@$P3%G0HxP45%~Yr;Eas#5LG}R+bJ^wfTin zg?1H6DRfEN+m7K{qZx4wb-;_Kl#gemottem#jUnOxd9-~X1y4Y#O2F%#*wLkKw%1X z?2FhH4x79e8;Z?3c|iGb8LK$O^yYy4pW&7Rmwu;As9Wv2C&{U|Jm&ygnQ+FT%fF<2M1snb$|ILZ zV9H^F&6rjDZq|L^Qa2@dg@8s>LC zSlRdDw5>&U#HX^?#9rF&+?yEMpIa$c{dk$ zRf&J=>)8QG7w}1PUNclF1MG6iwOc&#LT*qd2#?MJ}skXNJ(q#Bqu-$_E zcC&rX2)PYc)bG_ufgQnj7sV#gXlrqX`trfLjp_pWgV~w?8a%W%;6j0!zWSYj}I-|-G#{7k1^Agr3CQz(@Rr_Gdy z0xx#1;@Sjk{7#^uPr&?Jv&Ct!E_WvfD!(KxiR|kqVr@q$9k55y$H9an`>}aqB}Uuo zMCOh`dt?N(fFEZ0QTF8HKAN-ep>@A9ti?msxKt88GC5aGz)%DcyEs`^$ZA#5(Lddf+}ebQArv|y3l>;!^60pkjZThN zevb&kL7xM<#)jjtARP4pk`ofolBY zhx{%+RaB2GVbCOwj3WnCFIRe^bE1XCKoWH_zD#7i5Pj&1g3qfhSmZ5kX0j$@7W85I z!fmz+@*<#&5dxF(MxhK~wu`G(87ufnxqRk$y6n=&Io*4?Y^G5;)kZhbrPFRsH+p9} zReL6#REoD$fX%N*v&%0lki5X0I~5;ALQ_@I@N}{v)VK#9r5pm+|B?FBf0}*`R}Yy3 z@3rNj$LCdTz%SuPIef|t2rxb{NI^nEq8Sx%poQi7&%QB&8*%PY#~#;T?{OT56egc9 zL>QJ85{-~oi=~K3dsTA_fO2P!Y)8UDZ)RIl_R$TzDe);I0dqCDj=px%prt*(xIwn5 zC-{NS3^E#dh=6}jRUD9_u0&d}T}O;|LiFRl3{a^P2e4T;`8v8v_gr~w9^p5+IBUI}7950wUzOv`})Unoz~ zt)Jjz@$Vq7OTjj?>TO9;QJStHfsg02SS=>&*81s&+#I&+2>+c8>_n%t!~E9r125w0 zh?_10c;Ksu4>}H4&bgUHd=h*bUcR~N#YqA$Pv5#<1`1fdk3~_OcgsEPJ#e7pO9A5O?E(Q>kGP9 zAzn(OQ&w8sqkylk?@NJ`n8BaX?S=xk7FHej$%<<@m>fu_%^{Pjt*uyK2dW>x#wrq$ z4T*{QWUg#y?`|Gah*i||mGsYgRQk^5;tB!l@KC(gY$^79dq{@hW>=a41`7IPCSM$# zYsvijdi8X+tNs3B7P(TsV!}^SF~Jvn-p{D0sBW5Bwu!YS0)Ts*4FIfuc=NwW;k5PZ z?CMhD?KilrKl{D8DG01+Qx{fi1qcEV)qr0BkARTH>3E=U!s}@YAke4xyJ`!yK}AXc zI}LyQ&VEO+5jCq%c0!*lrj`OogD6(J*s9k#m89ks|8t=Dr@g(fE-r2f;dkm(dFHQP ze}Vc{qIglI^$GrBwH^;Tg<`OupS+2q?9i(#zZ|%xa8P@+cb&ooTx4ao|FE!gdBy(j z%0*-|TM|7%X6YJlZzC%x=A#q?(g*HLZHnhLNFICUz?ec 
zI+6zV{zZJIN9w}4bkqhL4e=Nn;dgn`J8bWjZIA7=07OmZtlSA#-ATL=<-vc46Bo^< zaV1lBgE)qpBG^bet+24s?2f5Wy#-pSNNs9(!s%8ei$ZKNd-~L0{V>2?z9;a9T$`MM zA2u8gih-?&!kdQ(7Hh?{sShf54#1(2)8SaW$@w^a&FgMK?`uXzWWHIC-TTqSSm1Ot z(LbT1kb=BF*~-&o3LFoAr?dz8P32~leS4dsQ78CeR0r+9{a6dfp;aZHrEIBmKe#nC zW4AO*`wA5)lw-Ezb8>TW8GmrSm;e|`4gydQZ8*F!R54T?ALFXN{rWu`9miIxlZQan zZ#)}j4T7Wa?hu@J;COsqDifC#=)CeS$=Ml8QxKnh{POSC4+bnb$ujK)iRkp!)`jh( z)iP<^dd%WNEv(4Le6nu~7@GJWqKFJWl7$_vwkSmUqr*Za<^mVe9De(i%;nxoLlEQr zw8%Ob%hR;rm4yo^Q%qzL(?2dRlSm2QgQ=l{|LN>x!S;eqaRA{&N%Ro(Dsm*HF{}P& z4H?ozOZo$W&|I00!V@9z6fz?&G-ElscnNkfR_L<7h1@h|euOec& zT`%^RB8opZCmY6$s|4VQ){ANWS)@f^IiytKBx8cbW2j0XAz_Jg!6FLHRZ8TU{oq@Z zl`epT-aa19l+kb|6*o`4R-g2A*~^N1jAvy&9pO-|)#Qeb1gK54yfd7hKbtNQrRTK2e_vxd5d%Oo zGAbExc7}W*ro+-VvU*ROl-woNYJN%qSVS3pkwSMj^F|vY zzqmJ@O8`;jX(L{js~3QBW{L0o`;DZOl~w7WXkU~An#!M}1xma7_~R93ed2j=bZ3Yu zl%dw&{{$02TEJ0Dry#xO8yBYz{NuTy-IEa$tnu4joa$dROf{orDzJ)O{N(@qnIn+L zTRa2#WHJk;;^vzZs`Brc_y5$^w@&(vptzv7*U0D3SUlbyX*$i=dr7ym)WlFCV(lQl z@q_J+l6&~IJ^yhuT!GIE^XYVk|C|aoKY^Jj5qG9eT#zFR%YN_L0T1|ciFyz3iJ>77 z0GcPJ-}?7!j9}a)UCo3WD}+9+f@wX`NkR}h%X?(+5gS|>zDs5 zq~BnJ%k^QSKcoHaTK!NTSn3L4?LqXvMTn>_DBh!LEFNd!_rJj`(5Cx|&DT{IH^Q;eRCjXJ>Dqik_ObzckTe|krH3fL3@m!fl4 z)H`x%No-~#d5cs&Z>v}u1_o3Rh1^d-V%DH`%>klNWcqBQ3nsT|#x3MMW&g}fcq(tr zOYRqTc6RY3Has2|n>3x-_h;(bvrfPJqx^r8Nl3F>uU0$F?>$I9^e!zW0a#J7o0GLj zrv`K?#q1{Mlhf#>XpZMhbudlVKMUq|j=to-VVt9!#br$q`FQl+TkE16VB6 zQ)(IliORHzvKP7Glnm`@z#dPjGWy~2qD)sIv&GK`)UB)-AVNh!R zm}1SeTyoyG&6vRE{f!V|pL{Y@wzhT^j6v;s(JV7(y<9#ov7ckb;dFF=H5Y-suyMR4 zgnaVpM*C!A&(QJFw$P|&ZY~nqi!t74G+F9s$vGXk@@*ntPDf!f`|*s*U+?xb%8Za5 zi)YXzH8oYS&dReh-H2K>bgEpBq)?~Lw}f*?FMNA@OEKGE z-&S@K4RVh2*{Zq7rLExB(dT?&FIn^Ec{+PXzBiq~{K#Apg&So96B>-%Y06| zW_15Wv&tLl$#G2woyR#}Yt~cCi-E}dSPt5E`SShg{wnV+9+y1~lKVk(^ZfICr6KiV zxn9c%xiN{Ye&(it0A;@S3s|+&5e^PRdfQ|n044kmK!OD!VBT&Tq!sGdS}aHcX?z74 z)Oj!@^Z*c`VA%7Sb(JG>BFo^o+gszXMnt7lGtlX1Q4C-vDK`lU3Ee%Kbf2^n^Mz^#l?@lNFChh$RG=ln8yys&{?YA+uw=LEDJ(iRBbn4%@j}hoMc3)CthIE 
z%Uff7nJrn%-+a8iurBs(NH@pf=HbcLT$rpjIg{CU`MxQ<_lq}e4<}qIsczB}<70Y; zR?Rj&4$CRCI*`2x3|N*wvwU(qnT&EeSrC1?_s+=lc5@iQt~44EX`|Q5*2nKNXLq zkpdufIpwDe>Nj&b#pj%v1a+Fg^Rb_dhtw*iPv5!WvEiD{Cm_7X`nfOGB29fx>z_YqdH6BXhTYT;> zuQxDMrUOg$WB}aS_?%8;7Ql9%j3RlFx3OVRDp60VQCOoQ%F%3qhlFcYDT_2&X>8jC zaM5mV?$o)irnVPbLo$HEKDz-Jt#pgq6(vA!dt|ZY5Z@@ZXgGi0Q8*fo%bwq;T&$X) zT&6wXSlWwI3~9!iAl>9fOiwfjH$yurNeXfpTfosP_U?x@l*y-$|%&Ofgd zs+1Lsmbl*>Pf+pJy^3u_Ec~(_z?=&+g%Zd(a9=~F<`rfRK_1yf@P;5JOB^kOJY^)*} zkIo#!r429E{xIAdq>kl)^%H{h2ux#wNC4zo5zfdO zj>nby`1q*0@0EW|gddUO=GxU&i;)=un2dC7=xi>q$Da~n4uki&_$>$`L{vkVGF6Q@ z9?Z_+bps(Ue!voC{opq{=pt}&!J)?`;^@x#pT5)((BQ!Vmb_;pRLA1(aldCwgOVfb zC%#a*1^O<=wNc}+$b^6ph=Pdv!@J>ptH6H8Y9G8?>JEVoLhy95Q-un`~hy)^H)=dKs0HHBEF z2Cf)xoX&fN0W7egvw|6dfxA^>yv8Nd2v>x%(NL3CD{i}TB{Mk|3wAUPH_FJkr>^@; zHwt9GbGHs~!5nd!jf&BzCcKhmgcgQjN=W^9o?|@#A;_`9>;5$|HX?vW<2O>&N=5H( zsK%v3bHU#EX8U&`K*D&?QpR8r4aX@#{#ZzSzJubMO*8yaGX6&mZO1+G;v;~u3uK7y z3nu{lCmhiWBPsIRfObsFq1huNL&OiC-X}bSxf5)79 z=ze{e%T=~%+N4&6AIvF16bmj>}>) z&hO|kQwXx~Gb>6Kv8`Gd-F^>?4$vfI7p3K(=l*B`U<^HGTvws@0Vz6|qsVCm>Qoxz z;rN{Wx4u9yfq&OeVIRA;&8(xVYijt{2TXLYTrinya+y?F>Fih|LfC*7cW-?ht&%)( z(0+~QwG(jSPQswq&d$t8{{Ajc))ySg^kkLi`sYvjwMXw;JyZmZs71!*FNDb^iy(k; zQ}O20x3?eW%gz_;tRoxS8Goo7AdC3Jcy@G(WVc}50c3Ecm2?_w#8{9@^E>sz-_~Tk7@-*J)6J%A0{}6m0mzPSqrhU3Upf3jGib4W@e6m^J zF_QV2>&Q4Ma=CG7ssMr>W+~a@gnVc6lI|+l4qV|f5JN7b-Qtz&*x?2cia(9ASGzlB zi+fybhYyxX4PkZ;)U8RY^i61+^sJ8~=|mmOkyzeFoW4BWs7M1j=~{)OyS+RDMceB^ zzw)mq9*mv*O_Aw5-cL2(0NsW3d&MU0>e``6EU??C^%Ux^g=sKjWk^_xjbY3%A|HM( zxkeAA)*VqZ(v3BJoOD;sHodC*jf&Yi^#zy)GON<6+DdzRWX1UBcph&!y@)A=Xlf2{Sr2Y>J$s4s!^OYpUd1) z#7(hFiRuL+Er=hhjK{L+EQb8kV5{F@nXtgfam059_>;FKvYHnV&>5M0PVcNkO!&m> zhk-d1giKH_B+pqx@cg04NqF_P9D@8H`BT-5LUqU~wD-gH(F{HGS9^dzqm{L8!j+!* zd4+r7Kn(#vac(t^&Us%Quus3mR^K!V%`+cL%V{?tWtRHEuX#ztB4@VF@&n6@KsO62 z{*uWsjc3gnPX&4qH z0BfGc7^^FS44$l_!D3!Kd$!XzH~y_HR$+lDXPURUotDy^AOFlX@C_~7Xhmlr{OR3( z!S%wuMy)=`E`H>~%L)K`v$)Pn<^+;^L~3`a#{e=06+Veda7r^pO1V-8%7tnI>diUc zGdGHkK-8&xrO^?g$Q1&# 
z4fPD43uddQ-_c`%1OAB@8h2{1Wgq(HoD!jMnaf$bUA_RbpcSY0R|E{#NonSqmquZP0ixjjh7dHRf_UgG zp2e(r&mimjVK1AhkCDdSJoJq=XJ`+p!yh>Y%=FqZg&ImB_L@F1s|MTsdYDAVx!kfb zWSiS;7sK3{e1?E=wEW>j!%Ls?vAA6(TPms60!_6z$@2|PE2V7B1`I9#_n&0I)i~Jq zQ_#@Z%hQt97V0n?dvEZ5;JpTs0<(s%Cmi}7QlLDj|JnUv3Z^X_h6eXR?D$$ zoj{ZMWaH3-EHf(<^j1aPf6ePbX>2aW6S%u22K%#p7^`RD>^oI8v9xC%N4jeBIT3F@ zu+&jnAw?QJr>1_tYSbNCJGo-?fvo}|@mrz-}hrj{n9O6fABB?Nf2;V!$B37`^9{1BgP z(3U~uhA|%UnZ2D9U@&=#dp|cQvP+q|V$%-I-l5?^t1nctNWY0FzDA5&(+(w;VWd)qpQROw6;YP zBGJgDvmY{sALu5tBdT5A+^yw7fRuaEpfow=)S)%!68Cz~RXqR>EH*5Cc?V4>pm1>} z_Tw|-H|QLppa^kLc%I|%HvyBy>IOIY?vZTwn^d+nY;XlMBk+v@9CkwvU+DxUEoc57 zpKk{7u08^c!14Le`ZWAm8#4yZk>2XZDQX2qSa*fwjtSQfujj{x!Os&xYejx;f<4GV z14KgKsPqsLG0njQF;l_MN^ux$NZSieIol9nBXZ*h-o%~?S0HW;f)pTN=%ky4pprKN z?sLIT7!w9CnIA<^2*>q({Bq$saNJGcCi7&<7U99_Ttj22N?Z7{AqWspVL5IXin>{r z8__e3KpEPz;G^suC7lJB{_l;_XROEh6Ie=m>S~ckHvu3|ol|gHVdgP%b_$CrJhOQA zplDkbJ^*o)W9KoEOJp$}jA+B2wjhshXvf_7If3%d(&PCt!4BhOxuK|WHUqsxqb}7= zE~6mbSe+aZmRgx6i_QYASnC18k&o8&i=Y_WbJu?PVHLl#EW?-i?msdi$|%F9kf7_>R1 zOO;kv#Zm!cEr}HAJ4kRdVM&g0|^Irn5ts{V&@A7J~6)pSl@WA2gJ#6MD@7p za1y)d*7si&-W#1z(u2r(l+tmhp zuC@f}Qy240|six@cgptX_Vy$J_J@Q@F3j-7`Ft(R#FV8m#iNuH{xE~6_1Ay?&MfE9E zz=8;h>{-|P+aMWQUHD7mw5uI^ea5+h@Ps=^TN_?l&D#_^&E9lqu*aaDwItKy>ShaC zz0O1~=h{gW)zEiJ-s>Jgxi*{G1CONJxmMZ05pDe7li?t#;}Jg>RL3FogS%uJuTp|k zG04zTN3t6A4&dbo^O&?_<32$OSlpfxY0k~1eW-DJrN!X&O&eyb)gB^uA`;QAZ$Ky{ zZ3KjLY*TQl8HhVLfJ0gh(+I`0nwO?R;ll%W_~j-$AAt;ATXz$hHNU@wnBq=n?S_Cn zjshg{*my_}1PNE)y`~^6^v|5|BeTV#3&6%=zlP?)e<`b{#D+srU*FLk(G0k(Kef%O zcKX2XgcyU>ZUIGNTb$!qji-(u?}cQnmj$LyQQrB6AjRi8DKU-aqJ7;LjI*$#mnocN zX_pW2d3N7=0AFT?!<+DZXY9cjelA73xSjR@=eyv^BukznJ@Hn|hGgO8zoG zh^Q5kF#f$=+S49%jm&<=DmcXxMp=dZl>oy-67T%5a% z9wSY4_pV)QuQi`J{~ZIKet^hFpt0bk>YrvNn(?ZC%=?y4WU1Uz?5NN&8mXJK5@{#0 z(OB;sX}EFQPbYv-JdT!AC|IT)SW2MCrM1+ze9B{E?YJZ#kZEuSeMGc2sPoaDXdDIg8|5KPgeL#Ab3OHrdeFoPo<}jDCM7 zU-i88qWB@a}h?`o5N%{in~v1*K4zbbUwT zfvk#@wF*rbnxHKQwNLkxM&8c`43l(q!MOg0-Fi-e4&`sGEoEL_8uJrmW6RGccBQnA 
zv~FbkxY148c&_U1Vx!+oJ>5MW>{eYvig^oSl1J`5UfJU3YW zSk*wb?)t7jm6c`G;#pd@%p&V2W;t&o97Hcy0Vp{k$~F~gY%`eVr&CdU?HayPR4sdA zOoopGu^$h7Cah?_bpvfkZn-3Kn3VkEbAwFguIDa0jDK~;1i6h)*vZw?iM5N}4MnZ_ z)hM4Ra_nrdD5f&1Dl*^9k{xli!^Qh>uaX!dbLlcqES~0;)U=f)^J+MDwnDPC7N0`e zr+qDp+fObYr+z`@x7YV%TbJ6THrHqR-e%qoWJ zzk0Lt=sy~ow%3kiSjyLwX3D2|bF#M}46G3!5sNPbP~eRX#hAj){@T*5M9N>EqEQ;; z<~oC~HpZcYoL>0fxnQlmf3kfI|B>8i;^pda3x|9_7$v)~aF9UiiQ?9gzM9WSFvL_K z;uo;r@6P)6s0?k>sm$NA1=n272{%w)T<-OJl z;Y-R7?{V(7=Wgt@e`>~4vD{uABGk)lAFHr2R+Q0$dw>`($nSKVCy~wSyRb6bfbN1Y{ z(NeQelxd3(-OMk-843uXDWPEEke8KAq^Z?GcXUtU{+>1?uR&)HCpzLsyP_5ztB!Dg zFu+2L)k7YwETpQYFX1*cParQKBJmU9n+fx_2BjmS{yWO|YFk8CnR@eKzdGZr)6<)k z0uHN>N$#;?)X%XWH24a)0)a83;(1ETjTG>nJoI&`v7&Aj*lhp9qYc~A-+bZf%i6#fgBUU<3s z2TYk2D*R+l^p~SDXM&PM?;aS)M13pC1pglc@*moL=9w`*#U`FWNO6`63S&LV6ErWh zo30-nZXT?cpH@$yQt1$}NjKC#7BpS|tRneanQ)%erh%?<6cy$)GLp?#oc`O=B`t|7 zG}T!3P<8_asb+0G-W&E-@IzH*p~dHO@mp^Pj-)}=`^7;b1;qB9qGH_nbh3$(+=+@h zKT+r6e!Z6qah-5hrhWDL&$rY_h-QpBJo=e52f9rjdG~&N&DR(1^sfcWjIC-gMq-Tk z;LCjLZvlF6)T4BMC^^0ihzp*Mt8aL4EpIBB-X&NZzfahVbqM)^;@Em+(2;Mr!JPs! 
z|C$-=%O#5+ATC?%yFm5NdP6ydZ(@bjaLsA<396RuED=+LL|(CgSF}aNT$aj!(e%i2 zyp-wry-XG2&aSC>{lcc*W`PHw?~H^96mz`>}KV|dkUo-!kvOq2za7zs}yzRtnICox@z;TRYKIigUz zh6y>0+Y1b?6l8yPM2Ya7j(sM5yyuXorz-oLURCyj&9Sn!#QD@m&40A9U6zp6BGo<+ z)4{A{C~AzpAAM?RZ;?XB@g>rKJPlV%gvn1-AxtNYnXDajkQ*TBY| zuv&M_&ydS~;4pI_e0QXbmm6`{zxJ!z`I%ttG~aFSmCx|^UyklydD23g!x>7IJXiyR zJvyt4=qEZiZ7D1G>8s}wa=VH?2tJO-?H(c9ovu+q6g+iD_@ z=B^|d^&e7zpZnt)Y78}p0xY2_&L|FoRA}w?t#`iNosdx;2FJIrMI|_U>v9+V5VDU} zY4(&EBW9lhCB!P@2n|H_7MKnAuo2F7*JckjMqZd=;z+TrHSMi8F-I%)VIX6h-&DUR z(Rld1HvHSk_hk4wR5`J4(k3pNZ`2}4*B09V?2W&0m83{#yPbMdMBMvNY0D?rMv=+) zBcRn|HJmvKJ(U?*rVi++RpJRVwf*2}DWmY5L=S*QSEeds=1TsI(}9m3q}GZ0{-$I6 z>(yO3Jk&#^DotLst@d$ki$TsX<)n5)Ty^5rgs|MWd|%L?5|oG+?Wb#pgxg8ztJvcD z=r>s}r3I{PoZMG9Ok@AQYgFNI{v~FpFXi}A694J9oO{zHLe5(3KLgmub>|A{AM zNC16xbelxozmEhz{vi}Ver3QRLI2-NUeSQUYU5)?ROx@$fWizCRtv-qiwgelrT=$X z|9q|g7k^u*&}zuY;gn$m#Qz?I1lP<_tQ-x9OBd_%ht#V8Y|Ql`CJdiP@bPs;ltMOT z?6$W%RACOn(+}j#X#ss2md_~Wd`FC|}K>s;OeF?(I-XN2lh#iclA|dgCgoG^6 z?iBezKp^7_>mlPwdrvna*y8z0kjm#MGp$wp9kALj_?k;Swce>>2N=bf|Dyf-X8c23 zQlMHDRb);7wiq_Kim9cxu&@A-Pyrl3u>4>%2e;5-kN9J$sqjm%;vnMgs3H(nGkowN zhJ;LiBq?_BT%{s0mJav7i=%;H*J<@?A2D$Y1=~|{n1hq9k68fqw&DW1kvcF+F9h_= zc6;f*b~{&8%5H#s&SW7A+WvSY8UP|hTUuKFz^Tn9_6IZhEA9^yD?F|R`pL+JqU9?} z1!C_JcHKr(xr%!`gW}^cr%lHqc#p;+aab)RfC#+|0I2zMejnNZM6%&PmX8F$73Tpl zjF;z+b^wNj4ttHwW5L}}Uq2SO*=y9b$D~xF1~1WfJrK*qj!vhY0>o<-Z{Sc__MJs4 z)vGZzdY%%vKiu>G(Oa8;(=+dgF^*wUH26cjXtO&yI@a>(c>C1oPl~v={--g&V?aAQ zyHcp~(Z~JP-OffrqB#wfN9^}UPo4rDT%aDTVi(Oy2ZToCGWfq$^!@Ay?FU?7rI(zK ze+ZU%Y_ZC=6MMn;!ilSQ$T!m_GYZycy9Wmnq`e`zFFJ4YfEk3m(H<$llLim)QBwXO zD0eLnSLjE<%aw^na?@;>X0k!?B%5YX@x=W8#G%~mtk=0l(zD&Czs|MTs!Chon zee=Bcv~@hv^L1L@$;C+l;5H`#Rde6$Xt>AyGxCGYo)!3Y7VPq{1S7gU&vy%hO_ax;T@GhSof1%L5mbXBX@w&!*UQ^r}pctl}vseCT@3mUI8Y zuz74i&egi_Q`qp`S?%DKfxa*jHXU*#G2A;`?w=m_`>=Ex;Fn#6fn+|uSXwRrnKJF% z8`baa@Ot;@o&~x80<|bVM?ot=2Kv*Dp*4DkNDGao4AQAg$$%N5l+M*_%t@sLfbx8m210>Yf@%JD=aG8& z?O3DfTmf;RJZ@lyb)oVu6u8o%`NdbxH-bl0rgrLu@n^C>a8DNwOC>TF>jPQ2 
zu?Cmhzxs$IpsR-_6iyAzB`aUEE;37Gvl5w<&;8qX1PwB-+V#z$B zNYC|kQ6G&z>AU=aHy7-I2+M<|mQouFx;Ew*T@0Kd<`^Q%p=2IONdrgYnx=wJJ`MSr zKZ2A>gvHYiw1R+9j)R~KYvIh$uoe>c5kfR*Q8E9@@x0CQ-Zrv%&*Zj zPU J8x#16dU=SeMJgj@{geyz*ZXTTqxCW;qrMf7S4n3t)Sql-vcq}_I{*!iP=iM zz_4(ULM_hYqLLD`*U4(DC28H!zq)ml*1s-~Y9bxOvpf&s8_ZDnDg%j;3b`y+xxv|x z)*TPs{k9<)OO3^1SQNRWOz7bZlXRKEp4-wdLRr(7_yN_T5>QyKq(?5$UoUjxqt_N@ zHJQy@&lZ{ValV`iNrDMYk^@q4jpyQhl~O2$=lWo2+Nx$*ia*Qp#_nF51+m(tXXHrB=vFyYlMw0Bdwc-0!tmjw`qN!R@m z3@}B%Hom&pShFST>IQr(!O-2=pJ>5vZQZUJ@T44N@wqo9;<~cXY?b-a8MDvOdY6K9 zvMTG&dn`L?IGj1a4Q1Hg)Npk?MFmzVr<2FHpd5Z7gKEXg0wI!aH^)WB;`ibD1nqQw zB?~(GSHzCO@E2(q<~A>mXUv<20{Fwlc!m#~Ol@X|7yiK`GGwHETE!J^l)d5mCn9H9 z-h)U82?<#dd+l2HKd%%e(>kMQ8D@&Po(At*n;xPREiGxriuYgDM_x#6CYKqmvv0oC zY$5AM;V^x#*e6!U{CD{=z}U_dHs{>63orZeS-1O7JVYN$b9|St7&MK0n}AaBtjqQO4Yhj(LoWw1guz>^uEG$83N8s+iOFlGIll zi4B(Dn;=j^{Tp4$GvI78BTRR*A{U%cH97_~(%*e=8bDS2r%dM`Li5TaI=Y`s7`@|5 zA$4kSb?OEqeo+f#y;7+7d&lX5foLKp@nNMw4MBUufH^ydhW)kt`)0_;y#H!vlyLZ; z6{z7{I!XUri(nh0x0)MGG8~P+t2oL3O880vpoIUAulO5(0RHP&tXSdyedk|&y_(wD z*a{>Q|E`11WZqkCCNfGVv*h4$R*hRqzT`ANzIfyxyq2k|0HXjGWQpg$%dfV5&}Vxy z(PLh-FY)wRnaRY&G7RM|wF5ECq7M1-mt`M_ItG4EC@RSQ({W>kgud9#CG>oW&67zN z-yPMla^V5uXG8%ly|oKx*awv0Xz#w^Sq9w-(%4X$B_Io1E{;~6)7bApH=WcO1e?EY zxx<}{$?)}b@m+zZ4~X~buF_U}7;udFI*`B={q+)XyJP0|_6{r%4vez9Il``f&cg-p z3%}32Zg*{VzreDQB+w0wjpLK~H>)#yJ3AuA_qbBX1WUV^4K_hxLZp*LU(l#kYT%aZt)ndxG+}ckV>6JB`Er8L2cq)x zF;hR=gCqA}jR~nW3*G=mAQG{^>=%&&dfJ+tSMm?pEt*8I89F;FO=rtn9ftalkYx$4 z-HAx8vvGTUIn{7IQ;O8judlPcfvPN#*-#Nsg&6@68-O?NYz&LBLgz6SuUigI-#dz* zBw6|S6oBElj#frpmiqrM#DEryZ0vF7hP(6UG_)29x58&z=h?i{VJm!hhKlC*c-S0tgi^dO#AJ!o__#Il$pKmObC zQ?;Y}RFQIk$>nS}Srf_H=4N);;Znn@Dp%>U<0rtEM9BZGXKY`4EzCQdiM4j2Da)-s z`K#;M&8uSwDA>ZKqgXw4S=i#U^uQMPUV5^nR8HG4A`Wf?2xfrYc(7QJ`lH!hh^H|` zdU%fS{(!PpzL4riDCqg32ch2=kv8*ED*&InI{X~xGnu2(TdYzhmMw~;3_0Kp6;NP! 
zJ2F*cy~R;&c)Dt^&G}Hl>v5zC@`mjQ#Ha}SA22YTgnBmF9$>cHISB!??(zJf7!a0f zy!BJhG8J$h!mKr$4`MS@v%EC!5lcxZaFUZlI9T<3Q>yjbALi{|c@1^qC;cp$)W$LX zmdLBq*FtK8!MNcX_eZurr<27oIRUl#CTB${rA@v>QO&O^4L0ZtwWbuQynw3xsthW8 zbJA4oq^8!;5nTJ+hn<^;gTcZDlBnFJXNR|516(~q`S|IW*Vr^OlEeyX7&xL zmgMIC_QsQ5NK4b;Q(<`2T{i60$UH0;h7+r7iCbdNCT<&mgwaJ*Y`eF*g*YlXm zKL5Ttzj%(c{k3L*zkO72Ij0eRI?%!2@E1Hf#LdJg(eK&;3 zAhPhJ>jHo~wC-mDTGUGQjLAdJOgfLM74$(vB#Kq{A6@he!xw5YWh`hzMN7wp45MuWLyX2zMzGCam zJin%z2nHPwYL$|N&q#VzG5}OLoHQ*F4V<0yJtW|C1FC}Alq3F>&@6?Ml*g*o2E+SH z4b>8i(+?2r;)5Q7JO{IuV!*sU5BSy!UIIQpJl$V{Yo|WB-ry-!8_-cJ=F#3)80@3o z>O0*PpE_?b$aw+DS^Qt_$dA_7+uiYY!VH9uQubI3!0gD83l9}xDrOLBk&CjrvR2>^qO z@kh~5=d|?~6w-+eHHCR<&nD%=(){fJM4T3N*_VrBeXw3J?qE15ypG*w_5GFyAy`#= zIl6KU5yzneoR;&eI2x zuhW^A$8$o@KH%ejfUBdUQ{`aU!BeDGxlv=gi?lNx!CIjIAs?3=Pj%gJFfJ7^w--8` zdDaK(+Y{EP`m4w$1iUxXax}p1J%1-U%x^RTYj1MVU>o+rX{T0OCi+Xpt!&w#tdMH- zv0<9wOAyh;?PYds@hqy;1F-(G04hiEzACp+W!S`Qfl7=*CS@y8_;DYa&r2{l>a#Mn zLjg^BXPVjk5XNUu`_!84nfl!_mNDko}J;&rEHAw{n2p z{lu4Poh0gCfY4Dw&btoU$E4LG{d_X++LmD3?P|Q&4e3KZkE_GwyEEu_1l+<}if8t4 z2oi;UYNfm=9JuTjS;%u_l5q-4YpsB%khpz`axokDin1J=XJQatgVvg(=n#rl&w{_L zEyT2Ua8To22g(fZeV3+IDM`yy^|S+?NDIJTWRIl&t|GEifv?i3n8$WD$y5cJN4}$} zo4w(c{2n1m?3RTfnPZN!MP$AwIe07-HtUhZn;)dJte_ph|HT-hL+g)e@*!d`an5S1 zr|cJj)wcMLMUO~+G${-m;cMR@nJbN-YpPbh8<)Q=O`{|&Iy{j+IHS*#9zX^GrB1G$ zvG4V2gYE9V<&B|$SUNpjsn#{@_2DBE;-T?99*=CV^V3)dka$$6-MJ$XaXkBry;Qr! 
zPa=*o58q>x%n#K_UKiYfd}v8h&}98%g%K)0$vci-D@%$Gja*8FJ=*>8W*pG`Us1%T zaN55A2NxjI<2RZq`Js@iQTGn)4QL|ArwXH|Gg(bl#uJz}?+lcp0bHQ6@x9O_tT)St^c`o=p3&VQ(1zK;`f!&x9hg?(@sbU4chZdm+tQ6KGRmgTh83BHJ2#tf6z3 zpA=u|oSdAhbb`X`3Tp)7!W^-d0O5XS$_ec-zZ1gY@*^5gb6r$Wu7hYNK!zHhZ-`BQ z97yD_MnI>Oi?}&i{>DlJ_X!Htqm1jzx;Sl-hafptjlpnIW^y$ESP20iFl)M4wTQwK zj-1)hLy~cyNSD)k<2zvC$FMGrF3D~Ea^9<8j|WD43lrCl!R&fZN;v4CN4W4K`F51= z^`~@gnC(K65Y_>#^jpxk($)-PD$V+zpLb7Jnu{+uY}|p z1`)ceEj$ujf%Y+4rC@siC{3>{yzoZ}_Etv(viH8uFi(?%!pgLM)7%s=gOUr%fUXj) zd1Y5+K^ZdMtf0t@KdLDu08x}J1Rc@=qiR3A22 zAF6|}N2oXymdMr3-b@K2VRi7&gBGY^{1H~T7Pzu9`o_91lCOD^q63UB%gx5AD?hGY z33z&TXOt&~XbYHGtdnM{w6#3yd7Ohj+wT{$=YyvQ9zLHiFv7mW=9M@;oXI7Y=37!K0 z14OLlm_!J-3}SC8IK9O>IuolOb?-C3_fM2h8v+aI0P7`;fTttOJ22)RdXdR~SlHe@ z1CK7_3DfPx^M0Grep$$h&+V>azBaeJ`%_WfCI<_7-tJwQ7S~i~AbI`9l5_$iu>fVe z5XwlRa3Csap$XT7(6>xzxNZQP%wxlEE*m}T1i5i%%zGQ43brw{KIQnKfB$IYtEHs{e=6OdOz~@_WL)Q6E(6S!B94oZuwt%6<_IBO2Y_NIx$(7(Hi>&h zT$CYF8@(Q)wkOL~Mq=DBbFZ(d0xYDy=qe!ie-r{y4+j^5@zse4q-c{CGzZxtHaA6l z+qDrZkab!;q|G~8H%3xb+4$&Wq-doFg+1!mD|AbIvX9$77;&!~^hJ2r!l^0|)Qg*d z{l>rbfLUpT;|!h&ebe*w8AIiPAeu>Op~E~VD%3TDon6C6pUS5Dg+q!h__RC^jPikV z)s-uh7TUybUjBv1dap07&M?ZBWc3{@$h|dyJaKOU7QMfRu<81EJ3J@uxPy18WF0n& zUdu&)!1opEVZw-iJ}DzJw>=sQMzcox(s3+I4_=yNNSZNq+JG;T!;e+!<&lKyodDXnx0AqdKn}a{Jzb)`#VcQH&y2LUA-E&(+X!O|%C)ZhRftTZg-1EgSC>Dr!sDBJ1s zZw+{|S!WZ<5qTq7CKLgFz(k*K^e)-3n4|cy{aQIa67`_wF>D$%KDYnZ@BJm)#QJl8 z<25do`ou@Go%^3ePHSMzIy3ZAFxoyQNDrokWbxP27uov+PA|xF026=j2loYcXy1{8HVltQef$2c(~1uJLfjS-mkIG~A=TF_t!LB}6Ow9}ff& zZyAY(bYb;D3C!RcBV$E1CVhjCp0S0od~oPaE&*0=L`{cit1+bp{i&cww(=m{!>F>( zepg^*==<^e5Y_|vCz{4&9*@VW==e#gb>@pSy>RY-MrbeA;vC~Hwh=~ zjbQ;1tDZVp9(%TTsiiuv@#;FJ&hw_OK=RbElo;UTuS?ObF~9)Yr8|KIq724U*Y!Tl zKt5!--3J|{Wpi3|27m+y?k{)oTKI@{5`4h5W~C#;(tsP|BNoivKrGEA4Q^EBn4KKW z%E3e=@Ke7{X__2A9O<8SE}s8Y`6+lOAl-xP3Gh%F`Zk1LZG8yAr>Uocl*jSAu|A1& zG;3qbfyH!QCGpi7?_3%R&qA-GT`ee(!kVPXjXylJkz@ERsJ=eY@n}w5l~c%fB2!2- z_ojQY(P6!V6IIs9337j>Ie*yEUstz{2)O;2uCGAKhcQ%>_OMgehqHy>w$-+SlkuLQ 
zfaoL<&vL|pE?r+-JU8JIK$%QC$q9(X)A>Bh6l8Dp0ywA8iUghJk+FdYKaIMy8DhKk zyfDXUT`$LvnvHhi%fjp5e_~_n!s~n(lq=KZ+`fZV{=5KBVNW3ne-Lw6pd)fK(UvpzfD@~H9(_)4 zC?ZtRJ|Y4W*2CS$4F6D^NfKt+H zVvBkkg-V4%15gXW$b4p^8f>Buoa;YKlTbpMtY6zhDCsg7GW1Lr>r%fW^d13)iJ)-> z3dQHy>r79i^ICM_)7Es+Yu1bQqu=18!3*S3zbS(ky;kF&Bkr*KOp8ciO_nqqHp|VW zpSyq9Fw*obK(QUb@KH~y^Tu6wch=-)e8r&{fuqig4f%yF_XiIU*eRC(M(Jfa zCg-@>^V#`Acv|VL%wKQ-&Ho7p)Zkg7L#IqzW&8KQV7{}Mv(oGeC4c}UaQL%r8;7LM zM`GXv12zB!+}3#844U=)oXjfg>Jm^eZvM*#?P7S4=+5K@baCgK zs$vZg0(l{uJlJ_jRIb}|hZH%z&_CyeY5o{P05-wCg+GGdOB~jRCdD<=+&6>O z?G8ojxK+2^4a4No=9OOVy2)ytyLHSWMZ)ynFf_6GiSCkdx#!9?g#yScVn$b1YqX;R zIJ42+<*x)E7AbpA^g8cScI)(TXe_MGZWCCjE|MyS5O?A6B8i9AQ$*9(QrJ^{f*Q?s zAR19+Sy#_5z!gKiGj+|TJ)crpW%zL}Uqx*-ar4wC%6K6)fS|0O+gAoG) zfNS0QA;-4)xVCrvSS>68oSsTq>=SsP1yJ~!U?60&;Ldz;c-;z6G4egR6sKXYAJ+p; zbqE}fUi^ePJc&W;>xc#rAUGb5fZMa@ew9D#!CB7%sU{-{>R0L0H{( z15Bu*!KRIN07(FrYOP~GGq$euV}mTQiVl|Hf0Q1UjdNbxK`=+gY7wuPk!55S;O`uS zlt(raG2`LMAYAj*4SvV5){h63+^qZOn=ajkXH(3R$>%f+>!Kwmg}`}Zem1eui5Fmt zvfCW_CT{I}=_r$0r<`x`&VYw|b8> zUiYupDLtJ2b?ts;#~xl6&NkaWohi5eCkqAXX1O7*C`Hw4NT2@Vq$h_b{!4&Pjd<6RqQ`S z?9sZ0R~Fu_vUvU@GpyE?Rx)tj!IU!gsOH8X}y_*9s}ltjtxbxdsgnJ zQcC|#G!^4wl%LU{l_YFRnL2H1vVW#5VO|N|S+m=f#xOPuW*K{#5la86HBuzsv`Er( zYpMBIL$0B>?<~$COlbVi#>rca80F>(zQNh4TVnpyBy;V0<9<@DxCO9!^4CWAdqC+u zM}+clM&Hor8Wr%1p`kJG&_K!k4R28NZ%iJK5BJ;01shz430G0oeG)By|5+3Ycy1RI zPW_c$_6TlNP{Ax=SAS4Xqy&#Ajzs3IC5m6u90Ey67lt#{Kb~7(L*IZfq>Tb=CB)kO z;eTTHF;WLo{{7j2cLat#fD+5MF^1~Dm$1lDPW@sLp7LayT)60A-71jz4E+H=!UNXZ zx`DRC-t*~i8W4o+T{I(`E)}yQdo!zvyYG1kBY+|?nXqUmk`#8kPw#~%e-FSE#MW5x)bSm(6%==8-{PxE$Z)cKB|ts8C>p5S$baC%w-B@yWrOj^8*BFf_Kq9q#SW?fZE#!*|i>L9gB#&6Yu}FS5Wz>Rgr* zZf6`z-PK(HfmZK5V}E&RiSG*TGbV0H)v%ZLU4^#YLU zr+$)vaDGsn_;g8SyzjhqV(GDRpHnW%yMK5%d@iFN$Nj{7xcC=QLByg4qdLkjq44!{ zgEKDLNwZ=c>1zt1y1_Tw#cm$rd;L?1G;tC>4Gx>U{)dr>*G{8`3@;vR$ z)Qx<&*+WJAAl@GUyU_Ke=gSFVb5@e|ZJS*6&PTX18QF5n{M}pGq~q|=(4Uy$dp$a| zVqdTx=W?1qyMVrDgJ?^*8>&{cr`UNTqV93u=M<;pAO^VV03Ef^@L7dkdH?-@p0jO*<9+6DJs}6a)(fJOpY8U9fwWIW9d+}` 
zJUSp~JMN6B{;$?L?S*L+(1A0XeR=j&{AIDcKa|L<8QwSA;cL9ycn$BY)#S9!+9f|- zsoO3XsS{vNY%*8Dq(2ZN?jGKAHP|Jl1LVsVDegk)qqf~ToGGi*WrP{+`oczDZs=73 zq$Evd*_|5B0Z<65$k+;zi6?%F-B1U*A{UpJird*sz&y%GJ&8^ahST!}Z=DLTmG>Vo zgXjS}{|xyoGCga6ec}K6Hi`I=$#01{} zQ^Ith$j`08&zDL4c+4&63M4=Fw;m|f7|BeUlK3-11@ zR2s;@Gj`xLZW@R!&scCeS>l{H07@i#UR4}xF3Ey&uH-aC@Mqf97{yp>Wo4esSfSU& z2b%`RAO#6D8$(Az1W#0FZILFpTtni=J{o5xHh8jZL-yN(A~G>8^+X$h80Z9t07U3O zXRmLJ@t%60HpYhzC2hUT_`TPoROl4+Y0O^aimeH%<>gR6u>vA|l5{Vu zj8VFXXocm3<#K)tHO8Kl>Bql5FY6oZHoG^Rt!T`wKD({BGTT178t$ZV?k+hKtKNct zXP58=e&lc^ZLqyCB$Hjj&{CN=@~H|zNjdb)q9=oIcGcb>AEgjzPmDdgDo^UuD6Wf9 z-VPAoOPv(C5Nj1jv{ODeB%|FDak>v=HdREe!4nX#5>KGUw8w=omrW`D@lin#Q;IE9 zg3|Sai;k%hs(u-nwGXp|rhcQ#hpZO?jqOl$E-|7EXhTa9_*fNqoX7z7aGY-AkLC&(+NjUE(qFl}D6}#L#3eGRbR#WykLz8OMkwts`;q-33WGaC%!ixc}D-?5e zy*2xB>a7yck8nOLO#_k|5at_|m+h3=RvTxFv@`&ZDauS_A^o*BjLPM#sgEA0iZo|{hiZn&PpplUTie2IVLcx$!xw# zU0MR!-UE&lj*`%mJ-q+uwDorH+Z~V1*3Xrj9Jfc0b^lc0nSqN2dJ%lSo=r9(7Q!^U z-7X3h16Sw@6lVL?0`D&lxH@c4%J!O6$I46Xj%d|p{7I92{&nO2a24A1{+UJ42Xr25 zrT7IA)WfvN4y;~Cf8bt>ZFYTCLAxym%du>vK9k^+D{r;?5o(KY7;ag!&)jUcT+b|m zGdO_h3o1X-A5X1V6gf$XPEMs3H|S*A#8@J|ge#eJUb1|d=VH+0q6E=mX1xyEB%A4s z=Kbn-x!om*Wrhl7pWo*q&C0P>W8&~GC`GaTsx!=Dy7oJUix<$sTBsirH-|Q$Jg>h0! 
zVNJU7!tU`fFkncwH2Q8EcGswg{A2;u#=NylQk!2P4wvEJ3;t(~qxwKH{d@6+E1T0L zFxi_AGvti1{DPLxG{}~(**`v9y|X9%NcWWXabSTz{&sDFmrS@;etUM>x~xSPQu6*u zn9glk)t(38Hv`pxTNZSoD6%Eh3zz*3@G+?9riq))M=6Xm(@;}NEB!S2N-p>!zIXFQ z-0hQ;F<$zD_&xM$<)ROk}7Pg;c& zXbrbw^(tCi?z~0=9fBl$AeAdB1dmhmltHUe*dLjY%4y?uKi?VrrVVX-Z#;d^iTnQc z9bFkhzeP5{cwM9)y734}nnP)*mt2#+O8t+1E}4*3e328C$GP7UTxl!4NVQ;u6cIal6?3fQKN*b7Q0eMjhijfvYH=B9J{f zpzRyi-vZ#hws{NVKo`RhHsvB5ZcIa=WPF*)gp;(XE)pSciL`)7Q?ZWhfT)LQYelQY zGLF7aU&hphdPOk{2ZIrW$SKF2&1?@~lawU8(+4QlJuZ!L%>CHK=i6- zM5)QwaJ4>_K^^`y3qS>fSlQG3&coz3s*6wog)@Gk@Qhw z5rsVDjkNMoy>)CIbu{l*zmrgfJ$V7v_6BD0kjt%yDtw$`Y}{^4m`yLl5wQ}#98igLc>!E1|sSFOjkT>t7%QWB5`dTk7mrKfomoscOb~7xzjZV(?3S?0^a(XX6uoRJZ z;J8DveYr`p!M%i_yz@>fWhR{1%1O;n2E*Ysh^k=iO$}aAC0J-(oby(%#l>xf55cuy z!B)#YL|!2whw{0SO-t|kea#m}R!&6ZvLl6Z59FDJ)UE~%M7$6lb@waEt1?tL z_QP0}z^a+qur`#gw{#KJ4F-=8Swy z5ng=ocbe5-69;49DH__iA!D0Z*8*{9Y=J>?0kQ^#9$vlsV_2buOdPH8$M(&cQjI7U zz`st6`#F|gOQcAtklH?0w%7XlJCDIozRh!Es>H(3`^t6kdolB4%o|IfyD=7_!>WUCKR>+#}@+_4;mwl$vB~{Wq-ZC7tnrWvQkQO z$BFT-LgB_Y&0YeAE{Ia7xAM}R~D zqe{h|Q?58?>lS)5cFHqrV0>dvwi$i+56xv#N13PZ(c}(KN~k4ILYlA|7<$MZwUGt{ zWMXX)=p$`upae-XpD!rh5e~+zLmaiS;RBLjuX@1@UIxzZT6qVVGVvHQ?rzmVxI z;{$%Z$iGl(@NA$_a73d9iS(1|o4989eKa?yx;sd)=a~1xrjoiJo&&MUp!g7dZ>Ivo zlDT~W3GG$siLdXNG|zV+L&!vm^hJ^e zd>Z!S8WIS7m#*3BFmLHE^O_w|-1Q8zcfiAW+Jnbm7FvqRI5r*WNgQSEc7qmA z)dNe{Xc_~Z3bmzaQJnx!G%4K!f!$S)P(OnfB-;z7P|;y@Z4;iWGdDf93*BU7!deFg zatoXor=H%7-fIQ2XAtM)-2F}(hsM(qoTfeD3_#1Zg1})A0;vla(*NpLthdrjcGo#_ z8iY?G<2d#>@3yPvL>VLc$c4ZdsGDdew_#=i$@@N1I22h5?{>QD=(7`hn27pVOP6%q z>fH2BJP~W52Gq)+PygImzBeBXC>Di*+{M&6vX1aa-|;5j0rEq7QO5`UIQ1fY^Ug_; zE4s2zrrnZSVER%cgQV^Zvh$B^iC9Uwri(BM{b)JmW)`Am!c&;mv`bA;yj%Sjli>DA${0Ho!RYluINq%*g7$krG zicB5?HuBtEW5p4u;NDl zCp85lgQ`sFmi%*vBm{+Y5>EgAyb*g1$WfLqZY$T{KbbYA6@i7WKj=!C=KDlnv!W-C zIO%hTBdNv3lgYNYRSt(EY{eF*#{p{52=xNhhf^_mhj+Mf|mWy z!O7D>X&|-VS8spyI^#@dDL z44XbY)5&;fL=Tl>Ki;tyTBo2iz^HJ)0AV#`zPfdsfwmI?ns)d3WU&E~5CsHBl)#d) z-bRghN5N0`I;%5icF=~DhG00FmW!{bV0wM9h7eIT 
z&d7EWC=?TEanCZbJ}X%(`4BwWWW@S}4+8YYuo7?pE{xX?$6}I@DUSWLN1>v^i2x{g z#wFVxuW>Vmt>unKAJ1WrnKvk#t<6@N2thbMfarIV%Qj1sLb@7BxU{fZ`I%T6)$b!I z>Mq+DAbPj8U=XYlsoV2){6eG);9$8B4j=YQjaA5jyzFXf}au>(YWCp$z2p+vSs#%9T6a+qT{HgS-_JvJl(S&Ew6> ztHG@am2?J7RTld_yn_N>Jm0+aoAdN5RHLI+-p{=%)!AWsu~7=Sa03BEXjyI|Wv5m6 zTw9?MaF}rtrru<6)Q^!9Rhp}pf%5^d@pUwW@la6@-Rc9z_Ca8qX1U~_3NZwkcT0-5 zU9LX*1Lq2{RvOP*+j6*k(#G4cSJ~R|e)`LemLemf$oBJU!6X(JPoEGEPO;+(? zX|w&UPRxI6P482mh;y{R4$j|k6roAFbe_?px*9Tw1`-km=U#9lG;EXn2&T-fl{`OywnnK|YRlnY=o9^!JN>Chy%Gtk2Jys)MgZDb z3!XZy3-|h)jTMVPJz$Gn$9|~UB_a(sg@ur3fdSQ8|qcA!(@0(bnMq$KG2rnA>AN| zsB||-cXxwycXvs5*IB%e`abvmeeXEGGsYQ%;czJ5wfA0oeb=08&QFAP4~i?qx9wl+ zg>%C5>~ji29GV%+wLCBk-&q2qCxnV?%(y8EcA)G^^N+~)CX({2%p0#( z2`L=ubHCr7x%xy;;Axnw<)+DgTk(Dohoa2?nLcc&``aP+{|PW5q<@^PiJs`9dcB7Bk&u()-E5Wc z@oi~*|MZUw?&B{-L#4>uWGV}F5Sc4$9UGATF-#Wpf*Ud+YkvAKziTbDl;V{*WP{&k zz+O<0QeZuJ!Tigpx|&>2ubqvssJf@?|;p=VHv?9dP4AT z*X%e5KlDjAtpB=Z15b7pNM4A4*Lx_KQZ%={`Rd2MZ{h3d12=I)kOHcmgCte5#)gC16+|KsuzfW};)|~b>7>dw@ z_}9YxEGh=Z2JOFnwVO?#^xIJbz>D$mS>Y-mp`@i9Gb1JWYZ@)q3}#)<8S7Y36%;Al z{Bba5yxh1T|Lb_(OacjNjHDQ2v`CPC=rI4b)oy~)y2_VRJ33JNm#sDf9QYShtnj}U z)0fd;1|Rp&XV{+aeC507`J`)`kL zNpR_UsE)P2Hrn@O;O?6K?GzpjF6{*Oz5DNayp;rZ=lO4A?f>Z$19eqfyR`H!XR$bR zjYdvxgzq8&kIz=E(rW&dC%yKz@;K0$X9k|OiJP0w0zK@?qC~ABFVaYX{lM{TQB&7v zdyA*TP(?fMr)vY(`lEHvcW0;;J$WW~ZuJk9>P2&B_;j&{bR)o;JulnU6mwLYJZZMMW3H86E2$RcJD{;xmHXp(_5sR zRroUqU-SI#?t+CRKZ0ISMaQY+s~52!x~d8G>OX7y2dFy6dDmX-WIR(_4}fUcX3XBh(p-5xmy%%xYQKm5y;N=|;Vz}hP&N751$0w? 
zY}B1zv@;vbyllAJ93ex+!5|0ty9q)A2tb%lXQRl7+#plVRVuaEl(`atWA6;cXA9%J zJ!2(=O8XkA69SA%dvk#JULY=eS8KB8h!F;zj@H8V_$+OcMQ~NRP+G^a!$si+(_38o z9gdJ0iC?v@n?dIG3KIQ2So}#3?`_BSUX>GQBOnV13E^Zo_8hbJmqc z{jkJ>yTFfuR*|m~{WJFIZ*szp^&ULJn2o5Zuufpe9CWg?-(4dzGPe1)uv6kgnZ1pfLnA1P)u4QWeTnEE^CBKm#&V)nYPI_HAIu zw614Hzf{*Z>$7cT7P@oED0&h&RML>5#u#sHn@NtuUq`C7c3XxcRvCgKgS6#pHr21?bqQqM2Q}XYq!8mjMcU8PU*4WujgfkUo$2wsgz9 zD1`=Fk-gC{T4!5qX8rU~+L0oPD|ifQ+J>9cac4}1YZ07@9J1uV`!@%Z7DHmCn_W~p z5u#~V7Kr`FHy6&T+hcCibB^Vq0MDByKtQ!OTTkqHFkKLC=elDl9?o7MZEI?oqP;bq z7TnudeanEJ_lSVf%7Y z$12m4Z!2Er@)7}(s9Y;?rhy8O!N=a}T~lk6$FN~n{obgEG+ulO!Ezg-jS;D&G@0U< zwMXI1B$o+$ldIw>?pgVMhLo?)@x^6X^A?6)8FN47&1b{@IO&PJNcKaE?ns{CcOrmG zj!aT96z+!NQ|tm0_X+4Q_ZGx+tWOP-)aFCMp_e6U=u=Jfrbq%#_QvFD#wyStD@u$Mx@J4k69@mZU6AR70cFVWUIZ9U| z_=!x4(zihXF^VguXDo;F-;Y+Pkb`fEw1i4`|!UAE}m-k{iS^w1qT zb9wG-Mc_#l^p>!gbmu~&N4W0qs^4-V0`HQrN_=iA&!jqL%VW~%TBGo_u2{)yq|0VA z0+Sbw+wo=BPJaR|q)@X7KMm;HhvO7Yj1XRa=Z)t71SV z56Y52iA5)pcL3~0xQ6}q7CP_6fxytwxo*-L%SdwsR2eFyo2ZA@;WsxPtHuf6r74mO zY^K-8FR8vuXEmYgI0+$OA4Qf~ls#0ij^a-pOkq!nK?I^gmNO`l2Ti)uY0#SUlO&z@ zXU8}FIZ)7|9>yB4&rP$;OhOtHa*nNhd|2RXSqp+7jIc5p9GPXwk_|sbr-4)1x~(xU z*qv^W{EALf=&rRfaIFC=MI=+GS_e~# z*(%F%UL<^8%EWQ_Nbs$agj@$Exv5&!);b$*Clx&c6I3j+$IFN{Y5||3hxOQ`jg64t z;ha;V<*Uqxl^8Nap-nMWo(Ppa6-s|!vk84qHqaV9DiEzq6NR8cD+H@~G|9YmGB8_< zXX0#)Hxohj%`<$+KXt~6jMJugH;5>uMEU7pP83P(o^hWMv8qY5>EHY{o@ z#5=*h&fcsY>EkU}5?@SG(WdqP9dDA@z2CHm$9!EV(f;o_zzG}`zIJBEg#1Yy ziBKN%bsz4Xzeklra8y7KmS-aVlQ;?z!Ge4p%6>xp&$vrW;L-$X`mg^aj^2VOAYX^w z&JX`HZnFWnG_Pp8@875JCKA*mLm5-2_%p6o=VRjd{VUwx2Y?g;2N|O5GMPW)J_&xz zS+UU3{yqSd4>-sO%9$ko9hczoIK)Iv{`Ud?$4`t9LDu)zttwWP;d9Q%F z@uDm>*?SusT>DptjaK&@7sLIDiJ6znN`^DU-^2XL%r@aJmDH#-_noP9OMU=CpunqO z0`b;20qGuXqTu{P-Uj1Idg5_OuYo8yZa%)gFz@GM4@I#h*!zrKkhwYAmZdG7J0v9P*-?lwM&U#&w!yl8faT-;%yr{m|GO_-d3TT{UO^~r}1mdpEa+OtN zcE+J>X;eBZ!oQ@Y%(}l*&6G}}1UY6I`7-;T8}&+ty_|2sfPCAIN!?qL_O?95$HTZg zP8ncZ8|rvi z96C~LgBAb`*Rs{bJ7kln$t0thnE1gS{}`#8gEzjS+A$k;Q68M{F(iAiFGzg~{3#yR 
z<(rc%2`H9i;$i6utK%BXC-GdM$)*r*H~073dWig_$yDVU=~k}wwJ=#g++nY3Hs#I5 zUib@Pn77-@Y-Axyc}hZHAI@8qh@z$g$q>oohTZ0mv}N!V1~E!h;!_wfjpeevELQJG z2ii^YNA8xLli-6Z|+>+fzOgGuaZ=MwFV5jYV%LAnY#ot zGOp~GAsZe*`X~YGB2BIAA@6Z>hUxxY_wP699=z{1sAyph=eJ#bP^!LWh^gYonFbHVMr847*nr`|dKD7$#;-dH$|E zuLrnGk+P9SX`0a%cIDi$Xlk&{HnNUC4ifx6x6l9-mTfP>PjNv&NGt%JJeny&Iv zyT(>MD*&A)N?PNx3TwIXSiOZpMh?#%)&~_T3S0?C{FDJIt~Z)t8n4doC=A2y`zMY< z-N|^H72jwhyiJC`i^&rfTw8w+-uDv&e{wD|pKhuT8}-O}bMR-e@XY?T3z2VYfyPCW zej{Xtw=qniBQ-qMfHI#*nuV(ICzbYsN}_}Gz*O;~FxfxEY}anWD6FvloNpca;~JkX zIS7Zi&T%gSSgeRGxH%`1IlAD{_!nST5A=p*>*9J+l=4AuZTP}}Gu7EFt2m;ciY=UxuO zv0o)K*qD%Vix(~@?RH23M?}9ze}fEJGGf$qOXGNS*zogz3=cp026k%H_|hPX(Iy@V3TU*f$?#{zrD-)*n{R;3P4;ds$n zdqxk~CI5*5)}ln5;_%`ToHaiKaCPn@F$P2%tHgQcArR-tSG+oLoEaKWwTw z`iN#{|KAuX$Vvu5OvY(ueiluUifViD`S@TpxnFcN^;oh@zY|X3wFRWRO!L9Z*OEk# zA{&3g5Q;RMVBRt1skiT3orn-wTu`V$JhUGACgcEI0@k7~vXXjbnP%Ss z=!FEtCe%QnAT$W!uLfNnBOnnDYO`)^v#hr_R6dlo%+y1FAlt&^>`N6l>6rDu6(!ym zAoR-m2FLylnr@GUydt|x6U)D55`IZ` zlTI8)2wtBKir188kbzDyfsuF=ZH`Pbd7z7UVt5Ff1qPLJsV}M6(sbzzr_GAk0&fkO z%4Pt|XCWJeC}7D_*i6OLr-@kQV5 zE>xuD>o!O|J3knuRfaeM?jx!^nB6#WVkxBuF#-?9&PlB*{J7b z=~9*znN8Tw52M#CLZPkD>pQWEck1gb%9y%U4^oJjxc^xQg9P5>(l}Iz;#AH#qS`ff zTcs1eZbw&drYl^70rrnp!M`VpifnJ;J()djJ^kBYd`wdnkJyKX_jBbUpd|5Q^kREj znt<1dC^EPH6#(5vaaWl4#K?Y@oQ79k2itOJ&ziSMyq7L34V7YM7;r5T2gm99%t)?} z7{TMNI+*tnEa_6Q3V;pbH*%>6+ZP+(Br=DYnEr0^+ z62DFSe#~i|%su~Vr>GlKcVNT^9_jsjrFK-27F~ZFYh|~=E~#vkRHpQU0s=Ls!_f-a z)yHXDw&0iSVO@8w7s_$58geZ{uuLcl@N0u6nKl)nN@m6`7u&=JZ-XN?`s1d)8+>xH zUmK5Oki2uLFzWePlIgrrV`sLWSmpZ5VCW?}wOVsNBzeNc;vU&_gnI6QYlC4|3b3kR zLCZ!=5Vqf`r%n@n9wkJ08a|RCp_X>$upx!L-mt)D^m9l^;r^-=Rnsh^9V5v@9ZhBt~vHLFljgNIE*+% z5-1poutII_Ea816Nl+)o3a-Lo+Zf~Rs`_|9Rni*4`JPkS-5pBD|B*1$#^^- zt-S2V;Y_713dUe9ls6v@Ha*0dX<15PUE~cG&XnqP{Y_;8=TYz+kij9B#xgLdmB@zLY58>B3#h?s$B{J5ANo2K3bwyd zF62QxdA`BEgKJU}Xz;}qL-=Q!53;cU6HFymBA{mtnX#U77c*o!v*>lD#R zk&a+OT|UGHV&I@NNfEa*mel_GWVmP%;_75jz_8iN!}jLf(j9GQs-)~W-eu}nS6xhB zgj%EC4OASQ+za<%j6sdv2c?IM#_Qz51M}1I>W156no_e-Q7+)Sg2us*GKmhAPODxj 
zQS~7a7*n$#-7L{KAkln#27`Hj9A9p_@Wz6BD8XD=Cjw!Uq3ayK4oapXhLSOKJ}pQxklCpgQKB2B$k2%Zs` z)XE`|c@w`3N|fUlO-JSL&YDRYf^7&{4Si{KOC$*6o0c%C)PKZ8C?mB!Byksv3KruF z*yO&SZ6tBiHM7!ju8Er<75b*os_eX; zVXl$q>nvj<;?9h;P#d4Nt`{V6j!KX$>XdcRW7j!>c>_KZl&**2Q}n`ic){X zGcL0L?Up9EC)5*P(RD-g+rNsNs%76+ypIL&-NElwi_N2mtVonM0HA-|%Yd*pQ}LxK zpMB-%yBh_)}a3LU-k^0SIvGG%BI6+jhXOfj}{Q`qpaR@+qD2GuguLYoW z8ZEc_NsK#<>S@3V0Ax%Up!vri0pXe`n^pdBSpj?!X!w^A0B5<)X{s`mR9ws$JnQov zAoHVC&Bv4;yl^tDj)B$?+s<|hR1;uNbRjU*6Pb->h7YF+4BHf5d-0mSoh~&Z1L`|I z$3WbP;o{)!t#d3(kZ3zr)ip?M3nsx1Y&c(Aft1XsZSxkNptYY)O@X)Ry<4+w1HmR^`uGJA%JBY-wMQy41LxFbdAD3ZVq%`pY8Ou`N5+D zWI<0hd*4{6))^vs01lguGES$N@%>7>q!{p~KIIBmMvH1hTZxs1toe7 z4nXZO>#`sepuO3DKp$1B02QfR&iS#;ARGEup?N3G^VM8+&+?QC$ccM`AvP7Rj?u^1 zTtxZs^5xLFpy%neo&r^?Fetb@(^QY`F)H>=d{nj%w{f-mGF<@AXGfs5!)87f0%kN0 z8GC{?Nk`SGI9t6elT6T z{=S7XdiaX6`~K|cFxHzF8J^o&=U}@tC5+H0YJ7zd0rBY_ZwfSYdx@D|2)BSRA%8j7 zhAB+Az5lsox>4Twww5yr6&zG5ILd52u;B1H_+TUZ&tdB38}1*>I|_0;?Cb3pX*RXY zeU361jVkEL4_|z#GGR4k_|ZdwbkUW;-1;qA$K+HH?11O2W{inrNE#9}x=97ExF_^oic*H`}W{HxrR>q;VYcR*Ux%5P{VW zjN|~^G>P+^^VRv*=p^~VxbxEcxjF4m&SS!_(?kO^KCk#&a2KIMybZjbs@HoRQ|o*Z zmB4>i)cBc@J0|}y;x=rg5$g%0<$8~@6#o|(PuN`8U;cGF@Q^f4Df__2+k&MaGEzRe zZV~)_dhV@t;S`L?ZpeT(PkjyH%p`7?yVQBFDzNo5Mr6b2Uc0h2T8q|ggWm1!Aw|t4 z7#hhGHyHg{ZZ_I=g)l%U6N1&7;E#AEx8Skq&Sf`s#8+pP>^Sz+nqcc(#cI#mLW$XV z=DFEyW8M`u`wAL~#KVk*%ToF`i%D3g?DA)qKW=N*FpWC4GRle7BLv+QkO;U9rGM;H z!8g}FCzB$6hmP8(UkF*KXyI|^9~OL4*Rx4EfT9+c;|xn^s+6QG1bugsV{DWfxDNc0uW%ONddJT5|AW~$$Q?G zyy2I3kmPmuc(|*Q;55yZ{fsPtm%SQId!;?!w>UZx7qfd!XDFQPgj^$NCL;^;xfWDDod;fpfhRwdE@VRROq#I>*7e&Sz5kN#)RH zqg0mNvsO)Hz)`rl5TkU#c~iT8xH<-WrRX0C5UWS);)KAX@D+=(@2szEGJgg>v`OEk zmM-N!CyqT<+@q>dPl}K*lsht9iTkn5gzA{hYd${QIHH*0rGux$rhw*Vtot2tuPVLp zYEU3KcNW$Ibfl0~8FXvY2N9$+1nw^z27D|{Wxpc`*jVdd?bmU+@P-m`a;m{SUzgS^ z)vO|5{Ie+@MzDc7j%l zgcrnp!0}pZ5&7_&jnE>{p-F?bwD2&o6vZ)}vHpM+mjzJNaK0|(o$+4&bdHlBlxCOr z?uOzWZb;}@sh(ap=9Qam8g3Ty*fk3DUbeH!(+j)#%1h=Hh~?U#9Ka{_NO-QAjM3IwM^L$Cw5)3;GQ)WUF|E z>h`b44JPF&!|}~kyPV~M*nt2QK4qi76EH#v 
z*_q!G&i~#$-%p4?X1-VgE)xMtmQ#O(v6cAD&O1vx`+^S<aX!`)>bbj{3*5Te!5Uk?N$3jRo;K1C zz9*Z5G+A3rp;9lubcS=q2ot?-M&p(W>J0WrC|Ed`#?-&?gBy%Z@kz%b;T=z-6ZOLG zXSDo*I7Ea4jO|i9;UbKw0rNu?LlM{i>G`I2Y**>>KF*twz1gbR*ROm7K}U{?&h&#M zRUt<(RX9-xLR`^ilB)~?&dJ7IiXX<> GLHYoYY48P7Fym@fXHzB3k?~w{;9v9D| zrK2j4`@ubJGm9SBi@5R(l|HsI?jM&7Eg|LdxOEzNUC;DGuU)*Fk zh~dHDzo9=9VRM{;YNsb5p2!_}FKA)lJBh*6c+jxflXft}HOH9i<7H zTXzq0f~6X)P4T(EQ7Y4i=R0t8m!-Sn6fQ zqx}3pXmHZNAi@YXs-v-@!4zR-Z`L5){uDt77Dv$Zl$yE;h0_wZWwsIfD!=K4fj%Ej zNg#KjmJN_oGk@v^ld}HQK#K!*!sdH6%PsGhY&!kkc{WpB^7FP+xb6DBFC%SLLqP?a z92TiiuSuYUO%a&jgolf$Gbx{bzA|N;)xpOlexqe5zQa9VpV5yM6lJ| zPgE}GuMJ*SJ~R`XU21%6xY_5i$^q60=aVeoP48}|FS|3-illw#2cmTcCm3?tw4TyQ z1y?O98&ho>$0cLl=HgpV7O7|Mw_KE;6x*|fX&;3;==to=Rb*-Gu?XI4)QVk2v_~2B zs`mJbkG{%S_wIsn`;R{{fQv+K-)WB4AdR;LAx6^j18heqJUR{a$2ZkiPmV&=Y|6e? z+pL&6HHVYCp`;WrLE?=(hoS$unq$%S`AoFjqp5Xo)^RNGuGG12Tsl{;tz%}a*zHP} zyP@vl#}XW(xg~FfhSe=2^19qW-shR={J9Tp%91OdCf?g2ca-;0?;T<8`Bu9 zZr&`4t7~^+_Zt_6Pxo>)Dry^bo_{1N_l!Aw6zH3UfHAdb<_Z{pHsJOy->hJE!mTaN z69PgILi~-8lFdg)M2(Z083&4~)|aQEy(-&6YxNGS9d{6~dwUt0@{68uR@A{yu-8d! 
z?r>WDXLFde43@Y`B9jH6lXyH~j>f45Qz^1F;)<%vc*PJZhlxx13gl12KH z+i&OoyfaNsM`3FWS`t}zJ4KOUydt*i$fh)xo~(v%(o2_azbdm+DP2#M=+AQYykgQ( zQ!P2}Wxw)>Ymz%;f>e26H9P;@m?a=5^Xr248xG!j=T|Z}tcdqZG_cmyt?9!+(rC0E z0#O1eF_&KxY|E{Fe!2+3+rQeK6Mu~VYdo*2%~%hzl^V*Fn=w{Ugcr~iN7Y|IK3ge% z^~9KTYVU3Ez0~}i!p^ZA1vy)NHR3&b5Tpu{QE!xVG_rSY`-29zgfYR*^$tVDgRs?^ z4*D2O%a1qRVcQKz!sSCy{om4-#5F@0*7Uw;xR0n+!Gz13tlI^Q^K>21I?ck|*v{FM z%N~4Kl-Fg70;QBOq`sKy;*~VO2$Au79-~1n6_ftKj;j0_Oe1!ejDY-jbOSvBmy-e*T_=jo@TEz?GBRqmz&tXV+4<%6uRo)c#`u=RLD3{w z%S0<7H7-DLlC?!|^mELbqmfkuxnpmg_RqE`$cXTLA=cYJCHoEs2rf03a1KdEV27Ux0Z}u<9 z-jjrblM2U28ausmH*)zl@ zJ5=k$-@%G-7whxoB9gEuHhWp44Okx=Y`OPV?DAxMbrR7^*{iLO4NS>G6ozs`w zwe=?&&&;=Y|2qkk8;7X_1MCV8w2Zn+taagDv_kR2T>*xqbY#)QOZ)cWxPfp>6ZdvC z{d``yn0!X>7UUj&q7Nf$In0W&a8>W8^$GWe`Dd{5RgkcI^|g(BVt=(E!{nix6hyq+ zy%b;6UbH6QW@f3$o#aQ_Al2d+D8&D{s?~Xh9=T*br=|UAf4lW7g)oXu^u(-_iKT-I z(g4261-XecFW0^$&v)jA211;xqzeo5Ih%BUA@u%u#P8Usmlo1;2p@G{$n z8mIBaE29e~hY!3@V3E+rZtOuun!i9XGP~S;Ury8_1w|(YC@yCTGszy;M`q~#wqis} zk5CCW*dM?Qo+|&c&xCAd#JEkL)LW%sVT`7|-zFUOu_JYFCi__)mw( zmxNHx0!R15$@fz-?I`KTQ3kQRheBLYDL&kq94j7)C;QfL8e@ih4TIyDt;7Gjk45H1@k@|j$fOtn;T7R%y#fm{_dlT5G8efQQTXyw-Z zdC|r*3r{~l^A_`lOnL|otaDJ#Q_*gkSXAe})nvSHpH*+(SsTrgJ)qSn3;3{^ItrL} zke+Q`p2r;)OMHm?mo5qeqx>ekIVWb?J4s{htYEep$pGp$<85%#hQOJ!ib3(VtRoOY{IDrV;23%@Zr8b0E+n@~~h zOzx;&A&dd4yIJ>sT#)fWZL!Am~~{P%Z0 z(@l{}$zDeX)gYZY6K^N<%T%t*qx(t4M2raNp=a& z2uKN(S}p2?b1TiE(=-x2%DgTvG*1@YM>=tE14l*iJ4>{YH9+R?iFDNg(%~dPVHYF9;C7Ve zcrsV{?eRuG&10Dg@JYK%43=9(BXh?|-`wTV-#@8!yMng`_7!ZgMIqlX8B)~ZA?QSa z5(lYJD|Hx+Dt#=s{ns|nph{+VM1;tuD7x^wo3;mOG*8$V@Tgbkvo*EvV~~JdJq0kX zz_4~Hkx+JiiIG`DA_S_*<4)rWcM>!(NV}@5H4+aNOsqhOT}jb6#_o7RlW)DKDJth- zv;-e!?QOj0jb*6uaJ@4EUfB5_&m(_#-1DGs@@Aswbc;o7y@z4@AG6xOQyJ&%Ep-YEXOE0%00R(;M2gY!&e|4pHmZ zf8W6(uLPPspvDk>CLn;jNpmlMeYM%!o3E}bNuM#yI)MEtIL`trkVgJTx?tWXpyU5B zX9Qy-tZaK`s?joDdUjYxbu+NeVt(A-OfhukiPhUlDpqfN9)~b+$eq!O9zxXt$U5yo zi}odz%J=HsNqqkAXDgK-y%s*Yui!PiLusS}a!nX=d4jevc57m{i!yt3S7Pm2}Bm@3`zu=NbJ}8ow`F8d$YO-IJFRGez;CRTV;Kt 
zUlUUA*SzD+)KA>4@48a8e6HtVfb6*C@QUw4$b{E1kYFyVz($UEH=HG_8_z5mK}!Y7 zv9dsSs9*#y6m%5e`_M!Muhx5`Cm&XDQIF+-9madxp0p2IfjDeYIBdF+0JbZkLw5Qd z=d8p76Y}_Au}OEH)MV!eQADb4mh2r3kHfAtcjzbif|&|?=ifRs{pTyl*PtYu0WI|s zJ~<5Z1k8n?ZcLFsWWpYrT%L}Wd^Lk?acYTPPjqzIsnlcU$(XSLB)<75a4Oj0J=bNl zB9%?j0(ycO4|pg?rfs2}klQ}cCq_5td)%7aV>yJ$!e(b&ekXYjd$s2xqBD;61g}bH zz@f)AVPTqU<5#_ds(;pd@8sQp(>197E(M4B zQ?=Cq9S$De^StHC+cUkx`KjJ9Xe6+2ey5l`{KlXn54k^k9arac^rKdEaT1wAo!Nvb zm(4RVkwa{0U&GN+J1&xU!P+4aRQ_bOi?%GKl0beFF{x{n>-WYGrV0pYBXm4LHvZze z8aNRwiLv*T!|s%JLPqTK^B1kq+n2zlPTEp(cdFzRjfhpX-1%)&P~Wl z0lYB6lRe@n9`0{~MT@+tiRZo|VZJdrz(5$Z}ERe`UPkItoEUoPegxe7MQ}T93 z8u?Wxw`%Pz#)K7kvLl?7!tqvX?^-PO4kEPeLoSFE za+~CCE-RD|C|Ykts||)(aXRPjW+n|9CiGm|+}l7v)olc;J#GjH8(JiX>w0Qu-%SF` zAah0EGwz^whI4gcRHKzUuF=wZbHv^cD31~#!1ae|{Krxp$y*t&LN~<(zg*?K-tKS; zLtr*&jLPZeg7eNr5E}qc*b`Nkk-c$wdd}Drc%2ml)|dGRfLg%#bjcjXomV&H6Z?sx zML{cEU2Ap`?#tz+X92iqc<#hr*exzBpGBW?aJwBUaT#_oTAm~eJW!C-s5X5*+@Ktl zUuRKF)fZ8``_8)L$Bx4x$7|WzjlGc)MW?yt(89Y$DOjyuykv6c-GgHs3j$?qy;eoL zyff#9l+(|er{_ElzBk20bOTI|E#uC=;E*k@|^-C zqQ)8_O*RAFxp$N0&Z_I^L1nE@CRN<~>Sdg2jOV+FBUZt-=O?t~9HL(2=OE?EjGg2h z&_%s0dU06uiEgY1OzyarUZv?+bxFQW>38&i6f-Xty0NlqrZy@>b4S@W`V5cHcMqU$ zvW<(EaCLq0AZS6$OV<{MWYm&AT4I1%Q0nL99k8zVOPB?F+xJ<9eA;ArN+`3Dgyo1g z^d#sn2FJBqAq_R<ZzKsn0Q(LH)6Y>EpbgbSky*u$w9hEsLI@g<80G?T4n6+O^3Cv6mj_-f$3iTh z7eRp7T8o*oejXJ6Zj)XR4Z=7kd!-`~lZtB~5u_|}n^3pxwlrl?bxYg4cAagFyV(f_ z``d?W^;Lwbjz+Kem2)Gm8`$0jd>k{sE<3b%Kiqax zQ9Xdq4r$^z7pxvap277w{yTGQeDvM@dYgz&u$L#pAQSEvD#8wfo>;_WTs=LFifD}8 zZm(@Q@+gf8f<7C%@2kt!K!Us2Mu!l7&wI$8=&XjuM!fY+Ce$z8j{!cG;{awIc|=26 z4I7$zXc6NMr#r|3`a41;@Socb@Q6xUyU5EZuDjT+B)6$SJORr{_|%Zce#R~UjVI{5 z=5oKThW@1Ht8dyO&p~}&)DziBb2H8x^=R221F6CS3P=@vV}UnpcjeDqZywb!^nnsd ziYla_fsZ(DFE&t;%j4tV(hKfP2mHlD=&Jfgnlw%8|6~pBx z0&ypey55cJR4}K40u?F-4!J)PzLLzKfjufMTxU-Kp~t1n0Wg4&TA$-okIj*wLF~my z!@5HtB;`d6N@kQV>v+TBam!f-f|X-VDKgdOF!pgfCd(dQ^_DG{gbf7NG0v_*HF<5}Ckw;XwK#8K z1xelp4bdzv)d!Tsrr@d+%Aaf|o0#rLxxAAH-@h6Gr z7&K15z4HqxTLP~$Y7o56ei-krdHb&p%}*XOqIIGWgV~BEn6)fdQ-94`ZHlp+$&HG0 
z#jbk5^^+G$L-Ce_Q;#~>OYzrXuvO1kf}nn|{D%fp=_G$ym^1Y9odU-K%}OD;Dc+&) ze&6X5y}(LpV#_ERzbwf+BcbXEA~)#3G-i1U)~7L8c+hiBq0eaXpshhKKQpjkL7rPS zNKfRey+~*puam|b&E$v|*U*Gsd8%!yhf|Z#dx+b`*F;lUCHulDpN5`A1wnQ^$>eL> z?3BlO+c}r$zi|AehifAjNp`^X?do=}ly&n|b5W1tebQn3rJ6e<-cistlPSiKN*is! zxl|IbGThMO4(2m%n#pqK5fu!_ zxA9jL;E)Rp%>T9z{_o|Jd;tN6BDE8+)PIAC`6&SHX!DEulRx*<91hq($E2q*|0yp$ zezJKJ419^lMDv^B_XO1m74U}?q5lpn``W9uCw$02=g%`d zNq*$|eEgXYhWzJeNgQyKoKao+{-VK`K7xVWf@0AB85R8bzjbnC{RH#XN)n=k_1dX$ zME~B?1MfV-lAz5CI?#jAtDe|Ln#o_vjN24pTR0#{C)A!}{!< zMLIRF9@&D6o|Fm2_XN`?8lLr_%hP?m`e&c~A=aGL zDp9~~Pv-ExvnSr(Vv@I{81=W(#Qya}e>^XSidKci%$hi}(TY=%b<}TNiG1BU6$BdK ze#b31HNKk77~d(xe_h7Tlhr9o%-!l6k}kwGZu-G>?W zMTlQ}9){X&4NV;E9f|mX<7@=bw&;1vF+-lVFBmKj1bJ93#{?P49rx#$;@Wlpi75i8 zhu?}zFAn&abz{vE{3IfEBLId#+}j5lsP3kf8jXsomtJzTyzw5&xY1NdHaRkZnR`!d>ecb}}qwB*$(|Ruycnv^WXlxRZ>a44uG;q}P;D$dDkbbJ!oV6qy$N0vZ?*G`d3WJ(4Xxent;q@-^a-Y*Jmvzexc!%&^{0D7oV0jfTJV48epmVu$?2SBS~wHY^#p%B34 zkwMKnF$8{O%$xB=fz)T0`UxDCb1AdzdOfW87yYU%73a@+3VTG|fX+#VZ6viVP-e$w zO(8@Qf7HTA8lfewpUlewjauPH9gc(gi$;F-(=_Duv8IP@OOJo{$u}tA6`{&&uN6F0 zn9~7$2{w|iFKOrzao8|oL_g=mb3CeKoRP*b>b)|XEGbK(yytR$-KpNBp9kkY7BhJI z#$&z52s{5)Bms_k!fmS2;Vd^n6%BzEdyBiLGpb<9p%JI+`FDB( zG#YY8Sej5LDqYM&oRcHU7Q?S0-1g>IZX2q{TGxjjFXhePUs958jbveRe)}+wx<9*W z_CVzOJ)TurBAPC*w^Q}E8c0MyZUr81u`Xffnr@ARa)bNL1Ra$|eKo7)kw7f3UV#RO%Yl1p(Ubt*&R;)mmSqCOkU5KELvPt+n@9!BweLGPnBV{S1H@E1!{?BgWhw zLe1yB<%qqwyO^MCn6DeW(U7b0JV>e^&nQdaa_3gSF@a(1ek~_7)nE}_X2{QnR3H;q zZU)WIaE8mye5Z51yFm6!vE{X&v<@LMAvr??4c${0 zG-x0B^;~0;Co9va{Ia}ZknLG0_h`U~AdV1H^KDkIe147ty2+_g}XGIcos*9}l+|1=L!6Pg?a|`n<`b zA?|m!RnKAs^7ABZsruWJX(rXeyMO?K(e_WcYph$fW`st%A>*}ya8(4uhx?BTvXvr{ zD%*gkp^zd+oak}$zu0@rs4Ba*{a27~5Co)4Ko(LWU80iG4H623t+l=^!=xSZXlW`R*H22k4e9KWl9++3OtlO9pui zD~F$ukNhFybrLSc^AWZ=Y2rEkjwf%fD;inFyTq?6+t-+AqD-uFV+KN;tvIc<22ZwQU)W zW0G)bV5+A`2NCV*m zAmpK*A;kq>bl#{$-d^rr{*R~>a|UL)TCMU1kJRY)Ly0%WxBZKV0DZ@Mcj`eM z(@mu^wpnP9{wNE&9k29n)+#lhmHjAgiBHE?&kXhgB{xTMs%#cR^`i*{*{8hD3v^+q 
z&&%FP-Njs>;)#c9RDIAxl!c*;q+#4{J~mZp!bC!0oonP<3**d}_b9x`E9` z$)C$y`)00GLk+5qG&FD^295ifnA4G5&c(2+xKq9R3Y~kLdRgs#)fGmPa3@u3eq319 zGsKpM!%8a^T+2lfcab{T{OLunkaZMojv~!-9&Qh6U>;SxcRAuTKYClWo!KH2Oz`xh z>xo_R?w`bQ7fx(#1S6I_$_lp2jv6FE5-S<=Q^N}cAE$)R{FpsqMZ{Bf|DR|N?9>MJ<;)F@QkspR4 z|G1tn@Li_5f{a-nTdwvMf}!NH?&U7SU2SJ(1?|15;V;N$Jl%Wr>mdg5NTEq@FHi*B zzB^XAvFc`=+<%F97Qtn_@mt2yZp8T${kW|O7a;-~9l{+CoC$pxllA2Lu+;`d!_If^ z83=DAwx+V;Wh}DGt96aXB5RY+_CVH!yI-nkl@bZ`SKDLc7y^SVnR<&`|Epvb06#;d{e zsy;c4XWrl5b2D9SJAu;?dh^4%uv}%G9XY6ajm2X`zgVJB!?o5CAyv0N zdFS!pf?T}l1rrg!tHaI#>^;zjku8F(ZxVWPCy_Baqz32x5K^|7F=%okHY-t0I3l6Nqh zdoB^H$gy~6Bf<0aQ^WrE!?nnm>SvAPKbl@9mzJiN*T0&Q60dvL{?Q(2lLE zFzQX@$gR?6sy=uhP^)N2qjMwmSoEO$P0X)Er>JUmTxb_6d>8gUxwo!5Dj$^Lztw3N z>c!`uDZ5)zZ%G3}BAFhJMN%yO`;uQjZ@+(@KyY1X0RKlNSPEVU9|Gf6Oj^!Er@-)4 zzu$0Zis3X^MMxA_vz1cj&!>p9^!8B zIlhm`!^wYHNQBZ^IAVdVjpgMeR#=L)JnMC+X)qdhn`KvQt89C$lY$TfMF~E^Ix@YE zDjq?9V&SXZkB&71XRGzRgsH|2eaX`ge~0oLPT-_LhA({L>0kIuATN~LKt0HXlW3d_ zzG-Qv;I1bkeVNV%|DPUGAPd53%&?>zwAAdhDpE`<6I(sj%?^@kynXN*6!sh2)aPZK_SPR{F&C-4~S(` zaiHb&nSj}T`&HBfCd%`^8CVLHfQp+IU>lkY*iS2&qQea#PJA(CV zGrQYmo0dj2_i_Y~jaz6*J3&)1L_v!LA!W^Yk|Ls_3{sf7{sN(iOy3hi z8X$9f`;5!t6Jt_M8t$AOGx6IFkpo-zQ#{dEw7XTb(S`+7#%bv<@3xp`KX6#f2}&qB z`y;Fe6RG||3O1sTHYzN|k2M_DGGU+{cljLsnp07Nnxdz7IDPF&tc(H_Y%OH@o8O2%GQ4pG9;>h=Re$^tn|4&QzBM-Fzp=-4D7NmXW_jfc9q zxhaYL`uKY|ojxIT0TlEgmdb!p5%`v?dX0c~2JkD8ibo*oM_DizfJ(~N>v*pKLgysN zaVU+3Q%3tGxfLjOvOh4+b_Z-GdIwxrLZlg)GPcL-euFcq4- z+X5TjW_v<{jP&;*SYoCRNw1Wjer>#JbT*~|XYKkT;6{zdXH-Dh#?D4I^!Pbv=@ zg?+noxKbuiDC7EqBVRw*G!P$W&@fSY7Md-SN{G;}2<$vH>@Be`K58)QS4JEWybxdQ z3_4v7snl!oLVVF=!jh18{^ysu#FJ?%U~*S{S`p{+`o52;RD7~v@|1lkUr~0B+^xc} zw_srEQL77p8DqYwayIF%k;KWHmVhkWTza8~b8 zzWQQ$LKv2YtGYGUASP!)^AR84_-!mSI};0Qxt7>rzsOf>$x_HyBTPK0E~i|hQYkJJ z&4lrHwXcv9Z`+Z?I(_d120x`LlSEv8{l&$V)aGvPlcbFH1=>Gt22ClE5L1C{Gcz3b zy+uTV(lpxP#kX!ZH_F5@oMTW$qtzJ1E-rPKL&c{t07DVcQ-I-rkw?bXHhe1Ipe7?V za~pEB**6p@#o!pHF*_SY8(W?hAfzyz9yh05hr?!FK#dnskzf9LQjyQXoAzW%-v0Fw 
z-n+NYM~_h#dn-7des`{9d1q66)kzayN$LH38a8LcL7Qz7e;nK(t%p^XBM);i$I)w_NB_ASn-Tcj^0-%b;QoL>o#?r73k*Q)E05z0li@wm*g(Ss~m zpp2Ud@RT=Bg?&YaBgzf>8N2cmx@23mER9InST_cfWk=F*7$$4eGj#P>y&lZAR`-kb z_KH8SmHPeHPH#^((lt+nry3k7$YhaUy9=-I7tYMr*{UAbIkCNQe*bIHwKTvaUJ11C zvB)}p34}e;^<|HRC`&SpgJ4O8sh05dL9QhQ=aeU&lO-Jio<*?|inu2$fIkN5Lyd9i zutb{TpVtA8V1R-+7YCxTlUrXKk%Nzlm=UQlHt6pjNq$#v-MBp$iul^sf-%JzSmDrN z?|KfwHxOIR!`N>;E_wB3Ms5;T=9_P14^ScsAO}#2UBKl;;oO4mSF?Na(ID!iRAT~y zNGqVG7>?yD8hxby!`+HU9D^si=w1_|qNK~_i>(O+p#qXin=GUPG$ms-j^Q(BqQmX2 zc}7YtKEZbJ>pa>~5hTcyUKG#s``jRR#4psN&|UW2?VIwO?>4*PPzRPo7%64vehDdZ zqP{$922Tk(Y!=JOL#vmYb=s@RubOm`C3VwOY0FxZ<6iBO`7-%!jKYLyFjsqH5UEdw zBx3LN3crZFb7jcR@{_!$a4y1+U5@fms!Ol&{h?W62ctFUZS!VsBFgyYrI!fFLDY)3Zj_z+ayUrZ+Nec=$vp1TPigqjjL9R#caf(Z1mLd zN3;1;PcKO3@c6ij0S`<*a3RqmdLqckAbhp>=5$(z-Fz1R))m&b^!a4ddJK5gFMlVO zzUlyY2hB^k4a)_V9hXEp9Ha)E+5rS!^UD4fUHpTsy;(3zTUmSR%5c~M3!G9vRB8LLdq zuDgz5TE}yDJS_A6dWnlykoVkvXX1|}V^Z~M@ETBv#~5VB1h=RFE?0JHqvc{_K7mf< zyL^T0$&8wh^yoWrc>xZ;2ex=i!KRe>_Qs0;I4I`)n`l5K!Q-LT+KoZ@{weUFgyd>1 ztf8RLIrObd@M1w+9wTAIH1D6s=?4oPv7BjHyO-|I;L5Ae9D;aOw7#G=b8yssx`STi zLqGY)^UusEF9U?c{i?APf6QTCsb*)<%)w*tOz*Ov=NzYI#K8rH z*QmoyO|DZ6g-w=o?84T*Houx&2^UlXZ4Xxwji>hrLdJYO9-X$|q5=FzLv`&I#WoFS zVoK_Hbgg>1-(F!FAUZ3$obTlq`yFJ7T$Mc%mAilWv7xuxToarUC3VCUD9+Mw)o4Cu zhhJ2m2Sz?`Zfa{yNu%$PP#}z^HSYg88EV}N-{mdNmFT(YZBq?oGXRwcQX7K?G=kRqRcvkKZMqtw-?`!Zq79Fw~4w5(EB;7)P* z+icR&f+AtKJhg8R(UEWBF%4}otXTD){r)WG=WonQNhv3E@}4Lmn=MeE5wWiii!&ky zLj3LfGRYkr@VHEPO2mkw;}5zAcr_3+HpRFjZvr&|kg*S@&P1>YsFt zi34XLWFv)4HwX0{d>jcYD}tMzg}*vQs`*m$t=()wcIGh`(sP=|LQ4jm-*mw27xCKg z#J}9z;T1lnV_%iS?*1%1yaA$p>vEMYl+!dztAsmKyX@t+Cae2Di%p7{pRlV|(G3ud zWm9?8=>x7Aly)1<+4Kks0BHP+a`(0)~!#se>TRqwMYx?`JdAfca7m@d@7MJ80sz<7t~ zEGz-)ikijjz>NvztM_$(+#8|=R}=R^UD4Rq`uc;TCq357Jml-o>f0Kb7kSyZEna3O zYoX~1L~l+*I&a`@13lvuTzbMhkzfd>9Fb0T1`Tx0*1Bry2iK7r-rni{mSdGJy+2X( zuR|9YSHdlVJi7~ymT#UF$FzPrUS8#8GC=HqPc?m}j!*FYSeTJfq)QM-r;2&n=`gWQ ztGSn5GP9GhljR+jh&x8x*~b?Ax=$+V@@`F3-Dm50-wit7Y4*guEi8vWhQt|z`pYn% 
z0)isqSYap4(TK#LpPNiiR6N*^%?v*jnZgctK7kLV8ZedcehDejF;l15fx&`#|wplI{*gK?tp4hXutnE2&` zj6;*e7eC5Rq^kzh$g<4^!;kb6dm#*5NJOFojSGx~MMh=#!Ye?KnNI$5IF-+o*64IM zIO_)lz6EujL+JLzhx{k6os+2M@p6TC8@e|g6lnOivRNUV?eu!T%5^>;uKKAs*I()F z)hyKYM5ZN1W}PV8)-(T{mirni{(A#$;|~l~3K=07w@Bs-dLZBK(^%K3YK*q+I$_UG z+DLEj5)gEW3Hs2ABGsO~CE--@pqJYQ09u-|1QE0JCl^Jn8otx?Gi&fh<_uh$wg(vX zk~&7K%N?mFxRY+-GtXg|NY%#m*L;yJ8t)H2?&FH4 zK3#anNwuMw&+8b1WBUn$9Hpgo^@(%E9W4BBGd8T8@hnv2Vuj`2uuxdPr@T*V_|xjm zgYK}@jOpG%j`vbt^vLuwDHCYO(p%jS()i_9HEx&eWF{?1x9wN(JwIE{cE@GX5)RvW zu)^YhyQR~bp87$@SqU{iIvqVmdw+L>2#IHH)mPaHYqe~P%-~>mD1vBKOEc%cnlQKn zk5zko1`ez3WDcV^I#j#7_bE^M0v=7F9%BGW)u*^J?phEYuCm;U0)JdXr3T+BTm~!< zO#RV3*b@X{eZZ_VGFoQmMVyyN*v|a~gAzc>Dj}9s?ti4MU$r(UzQiC+t}(+igA0f{ z?4fAkgLpHA>6YnlIKbBA)DH)W37)VHC9q{_Cq35baPgA1`s#}2nTK3t1A=d7x9{xK zi1-NS>w0$Iy8c!?=xMe6Y#}u-+Y{F2Jo2M8(f(vpwn_;7CVA1bCEO-MkZG?nneV)4 zErq5e!);i#q$vI=<~yWl3R035J-s$sB+mr=wshpU1fVa~riZ6DpRalERC%p@TGYyd zsScv{H$EwU#^`W_^s}IyXY45IE;ltS1N$NP$ksy(Y25Ee0 z`vWyRWSDZWgnC_h^=`CdM&Oy{Xv!P%h^D?1V6pj-OB~?K9|a_~rbgU< zv|3w{H)?@cMU7V-10pB$jLSp9lSX8~cZECgBePKgvTbf6Pi?LJj><=5jIY4lF1K;OBsGvZ>O8xA%Q*XOpCXV2)UK?3GueQ z9LyFo{eHtB^?KPix96O*gd0BCS33^&xeI?5D%5h0-=6I)6#8`hBAFtcSNz!EFfFB6 z>*b1j&g!rwqC42e;g{;+uUoE^I}}pCsOCSeQhB;_YH%V|meKl+I2h}S6~frIk9^+D zw?%TfJa$2a-?D1-?{P}y||!@ zUe0WaUHTcot3A`PBk6dk+)vAMYNxVdMwH1D->W zALuo2LZ3UE8uL>q^ehO9-M7YiJHa@ee*xu|WALc^XmyvgX)b4G#r zUd}~;e!~`XA%Kw<7`1=^_6beS(4B)WFI#%Uv@$2H`jb-WbiognWbYF|)mbQ#Q96)Z zBUiei-*977B(b?$}Jzb;MA|3%GG?h$8n|V1~avoMb5#w?7S#ipUOAF<+EPC-J{? 
zQw|_4D1t;k|4^xENkit?3?57i56c&$-Fy?dwJ*)#c;Ns|c>9?t=Qx={`xYl(0t|uDV%Mm^!;gCbH6JVgi9aCW*)M8I$01!`4@pPwi$5mk5tJNjnPQ#gy$P&JZe8 z*L}if({yu!&y#1+9Ku_PD?|gPPcq@4}UAy!(ThSL<`Xo*N4{N9C zBarmGeDJH41I`rfsEqz>tcDP483=K;f{fx$iplf?tT;VPWjrM|0_NL zpVu-aiT`;iQUANhH(f(PZ-X9JaUu@2T7+n9dsh^H`>wW{e=IkXsoBbIkWZz3+2ePs zGnBU^y(5v>a(7$r!AM#iyEMZ|JL~SSdMd>JHsFb7Fxs=p`4#7}pNl9vW|UBPeeO=) zzW&5aZ|==nQpOjxdwskHvhpJQ6&V;MeR^2o`l_&U%>C^ejNgFZ};c;8-4;mKg+e?A^pkE0za0n!%=%b#m< z*U^OEL(Mz&&S@5z*Fzf{6OrOV`JZM-yj4(&5XWG6@e>KF>0t1%+f$wVc(5gkPZ6(X z=~KgpTT9ALXa>fQEv+IY(LPV;qZqV57Qsy3#QWJ&CWh~*pxkdBNihuJA9E{6KlMSN z#;tWUtefY%6#agjmWG1r4oL;rHZ`fT#x-#(Og)^Z))8ENILUY{anMgsaE|{=*QSKd zJb%=)=rv#5bd(-1PuS=|VVTo~zV!7xPj?zS#dsyl=8sz^-cnQaQX%1ryZ%x=KBgQg z^J97SAfy@TsDFlwjWWVKSI_%3RQd2-_0@l>UoL0a-2Xv4&DAz6xQYHGnOD?_Z~R;I zA5s3OcqPR+s3&Szjq`olks_%ENlRul|L z`C^Ft{~d`^PJuTCah?n${QF8P8N8Cp{I^clzvAd0@4fmYi@q6k|9xc-2TWG~Z~q}B zl~0%g9RDmS9G+FcqgVn*1~!a;Z3n_dxEbYmrkjau|GwhI2gb3rqCt9pXSkn2AH~M1 zi4K4LtCydr5;o`}lf?LU;b?J8!J9gtxfn|Svy`;USm0Cc$$cXFcOjr^G{OdSQzIPz zhb|2(n0{Um1w=OI|L@7BFg)9b)@ha>{2n(m7S$d>c$bn+PPGdm{C{&@gyBae9w8Ah zyzualAZ22bh(>&CM4a%yTnF5Ak3xcJ3el*qMhjMBC|LfE&^y`Tu8KNs9<#dsyGC#u zU>UhLItu?@(;w2{Dnqy)Rs799w2}dv#a6IU^52!l>3>-J|F{2{Ru*~cZKfgI!x#T< zL}5ETr<;o(%fpcu0=1s1?HwItA2*!;E)`zjYj{CH>Sm9iu3?s-4*nbP3#8iXi2v&s zWZ+rzeEs>y)M;h@SWdUMb7pLs!R7o13G=YP%TJ+YBRGf9#I7(o^&gM@a{p(?$9Zw2 z6TDzzn|nGC`u7$T1-BqSLih6Di^C=bF3!M_p~c@<$RETFRIEsUuU4Qsc+;miE?b)a zWCj2+-$4Yp1+k%Q+n4fiPy+=2gBR6J}0h~@A1|89(BDsT&u!|Pl9yCl#AI&cdbI&*OS`wAR3DyZ9Vh;A`ZAr3ZNKr7L|abDt-hTu&>>Wlw}FAWZ2o>ArG?Dy)N0)7U z{M2US%$)f87yV+vu;+`^#B|x{E_A}53w(}^3Z(}JR>tFv`Lt4T(qNXGJAQI31-V4S zAL)UELbZDXg8q55d2#(m6vbTL)Fs6ORz^Ch#)KK0d_G>~Lvo%$x?)^RZvHQ~?7mna zTP{?{)gWkNADLRXJ~`E1XpjVwewbhVDXko4hG|STtneK=qE9guNUEHgmWpL$I^SQ4 z2cpwn;1hwU#=u%3$_2{t8%(N^Kw$b44Dvr;PYJebTKy z=AcYZvoB{LLCp5K%x-5?@jR6!l%&0xPmu zczWKiQq2aHW{t*?sB?P#ZV5oEG1y7h{0#J#amW1snE#+gbP@cNKTX_TkArdBVVQy2O zz)>?zs`^4lrsJv(OeHz8Ek`b{TjQlIxSgK{8S{}g!UM3THb^*~CH%v$lqf{E4pp09 
z*=rXuiOn`a%r#y`TX8OTv>2{VY)Q&9T1ucBm`r76H!l2W(v7HzJ=q#Zk87aRe=c3! z>0Y8{oj;ucZJeqORj~tFlh3GxLP9j^Wtq+4*-iJ(?LqsyT zh2<|de&TLjZ1<1lia8n})~$#~Lo zMx27n@|T8Zhk{(4mT|svae4XYn{4E>au{_?^w7fMcwcwRy><+jwx_a5A*(otnmUnx zxJ{F9EH#6lol8ZQ+tndT)1O%CcM&I-%lUZO=ox~N@r*P&14)b@7d;fi$OKGMs@)ni z>1p5{a190$Bs5j8&UX#KYnHQR+Ot(=hL-{vIy%PRxkWrQ&qD6}IbMx8?qrRlfqrn zf{xF?P+GlD2`b#^GnSgzryrj6PBAB+R@un5Tg;Wei}@vDv;LDYnag})`sjB*i`+m8 z>d~ztGjrRUA1;%UWR>EPnUR?gRkZf+hx!KN-Iz9~1*bMo;lanZZBGv)eYxa*U1hdN zB(Hs31(;s>2DK<+nkg;CHvt%h6X^weTqR;!g-Y4MVP|fWC!;(%1a!CYG9T+D#=iF=?DO{Olx1MeZD!|~ zTpCB-k(~Y_a>p`wp_W%B=y3AGa@e9j+uikiQ-3{xdBKn^fFqqNY@s+Bmwd_9QD|_^ zZE=zE!KWqH=VZKPYyb=1AH2ZLRv1$AEMOC(ZbPoG| z2+!<&2g>zXLn^FTCY26o6rN4Gi&!w;qdKnN)S>jFv>OhmeJo2s=*C-KGjLPw^ol1PZ~@xi3FfRWKJ(&Ah?zLvKj* z_(=YPKl6$65uD%8L|}N4;#;Sd4ol&&9}&Z*=(DeK=l)>>dEL3VXtVaL8GJCFp$gIt zy*lOO>e&rMnT9)wXp??)(g?S$yaG0UO+uTPRDvy(+7_J_MSBZb5DY^!jo*qe;ckOzdwekBzgaO;DyA@y173r62j zV2(+O8^f}^&AXh$P~2Fbe+OThopX0~&D%sU<79uAd3j9g>) zAp%4p_TJRlo)m7FPCAQ}%|q!j@Al6lV_R+uI!Jlk)E1Q+oc}A`-bacF2i?a>zQpID z6r~;9*TNd-`wNqnTYfCVo3ih>9h_D5ywZVSCdSKtux!H{I7QR|3U(vN{Z*QVk>$z%A znVcUZtgCSxd0$=q$8YT2FJI$Z_42>I`L~lGLaCySW~7O0Mst>v>3xEU9%Qav-LgWv zBWaOWGhL52(H5Jz%7AG(y#OxMi;$+{Zl()s%!Z8TACg}>Pk4UmW~*jfE3Wygz&3^- z-LAkJkJa;7q>*cMLhYAY2L1@;Z?YKT8Z-IYF-9{#XP=5g`KJlG`u0**$AOi=+cYHk+L( z=7wo!c5h*6D;xT73}W^`;E>>VyI@rqzW`EFSK#c?{7-_P%zlzYOLd723B zpq2$e!C45?W*G=`v3N%fFgZXL^cKYJ`eD0d1Jz7rFsY=a&j0{Tm zeOg&^-E$W=o~@#=JW(*ZfMv%g^<{hB#{w6WYq16pfXWXmlNX+737D!t7jB>%#ri#5 z+3!P>9;AE^+CQa&lSZKR>>9-XgUqB^UDVt&8qldMkuKQOEC#qZppLb`MU_kP!EftU2#SeYsht}U0+tTZ}bUR#_*mBSw0_bq*Hh*T=N^?wQ zFwPyo5q!NbwtuWtN5_D;zR*y7|musgRM7RsD+fZW^8lZi+=Nc}k{UcH zK8u8s5L8I1sh#c4F(tn?pU}h2s2@9`0NS~5p{eZyl#s9@pu#k{T!xy>lu?a!BvFaJ zH_#}h2dWffIyhrq4N0q5@{+7^h<0NwyX@lB<5mFdV>aj+a0c>)?{1ezfQV`VBvb+5 zeo|*K7naDE&x-pzkRbA)X)IE&&`30+;f?bK#R?dzPU5NZf{;OXlU9vymsBgyI87x2 z*>^rUBUDVF6Al#)=@)BI*M5(yW0z`hVA;7@a@jEl5p5KgV3H1ju~EbTHS`OWJX*mZ zx6kJ#ACn3%>zQPT#QWn*DM317xooVpk3#KN7;ntz{7dl~`<>VI%wKtvj@#J`jm5El 
z+fGcAp`xHf0EjHQ;g8K2bAn3H)Yv>@l*`CiwG&xV<+7MBORnTIaOfUzEA{mY=uMg| zge|uPx-R@2iRNv`O9VP+R1}N}pSwn9+tVY|ZC_X9;m0wdKpJc6_sID(AigszBWoYM z)5q^=+>rTPb?-M(Wc$P)__u)6zj;T^z@R3SRcnZ{?az+SsG*e3Yj===r9YSMrP<_y zNo&v-b4AXHk0N#&K!6$jdY)zT@rWnD21|fAr-Ea))!msNq5v~mypIg=D^;(Mb?}4d+;QT%B{>c{2 z6%)gVod3XX(4$SR5yx+(LY{Xi7B!*))G>+>^i^%EE@h*W^~XhkMH|wktP4)`SBvdU zeK1Ond|!J~b3rC2Cs2N6AZLQi6m-M;2VuK(BD;KF95V~h_>5xX^A0DOWOO`6D3nsH zN`YdL^o(pCwysgNdSN>s`qdXyNHT*sx`~!iR8F%Qx!c8TBWs`+jsXyf{FaLP@j~q; z|JNbZLf?ask+CPhAlcq+4%=vp$}Iln!bFFLgg^wJ{erex8IL*jwt0j)$lB81hSpmRjo7YT(qfYk3u4?Ky0q5qcvByuy)ljyzs!Wu7qd1cl z7SANuCcgo;xdLXVRHe?NnK(v86>J3%gB0^{3OpR3kh5VWy1U#kyi{dotNb!5dCcu{ zlBb!X>%YHn?Ou!lWsx5GqBAf>RlVzNTXqJJloC~I)~U#~%<4xpFSmxOZ8u6>+;8u0 zU5|iyz!>pt=0}#vav(o?fR1AHbC!H8fp*fb_Ltj}rjkJH`FX7yllKFJAoSU3z9BXu zDw%bET(MF>bNAk}=0bKpeSZw=(ila_t+3rWd!zg9!qT1VzFI<1Un+=I>Mw%eXwWSz zcRjbTzfZwqFk%hr_<$_=8p)gi5d=?8s8oWlND~Lp`-oagS`g`9&idtw#KXaox-O7T;Pk|7HS_7(E2m!1}_;;dTEtf2nbw=1hw7X z4Dsz37;)BU3`yt2N4`*T(`NcX6*FWgqNQ zj#Lh#m%Z6{8E;$Re>fjMKh;7u_M=j|RE>G5tg#V*ihjk#fc-^Q?6l{y*?no+UF;#c z1@I_en9(il>nRv@iY_E`ntqEv34Y<158NNHjr90!5z>B3xih&X)9Dh4;WT#y*sFUe z`g8wHa8o*oviveozD2uY;9rCF#g0FboHshfcbp)W#GHNNj_i~;#~~F5xk~K#{`d~6 zR+6t?QN8E+0M(5BsH!!%8>oLG%VqHv0=>Gp$n#jYhh00#GWk{OU0%0{wMGt0R;sIb z;Yjg0{bWsUE4sNlHPwjp%K4g2$*%+)QsWg2`u<@~MDyTV6e??&vCJk9Cz35|0# zfhTXj1BP?&M*46Bh)a@Tw9mEYEkgr78_|s)E~k=-AbIkWhPY<&ht0*JIeocTM@bUzC?7+<`6E2nk)ht4nz**npHCJfs>4COK!b1 zQpIvEBAc`Zh<`D!W#aEt_o;tIZ!74Oo@x zKK<(2W zlgPKkyUSO_^K(;hEu(ux2ZLMBR#F~zyKfi1kI8UaQx*HcN&v+sB;)f;^t=g<7@x>i zXRC&Y`VD24m}dcg)kn##yh7y94Dfk;V}9?BAJnx#^d+bem9X5}=LBf!*>;V^r7f4g z=S*<0C*#sWgU!{mN3sSpSUUpHg0YLD<+Q&F(irq9Ns4GN0y?}F`2O-=?Ck*xg-;SZaEO_Z?pzN^rOJrr>G@$B{ zOko6@zTo)nqckpt4{{8xoj43Yseq+$+DK2~{z_c8uJtXVyO(%ITle0EEJAVzPGvmYFiAVMJ6ITKkq;m6@f0-}Z>@3RCVOg2Xp?f|Tb zMrm8QMDtP9Ur$uGVEwAXjwSw?-mfygXV7#N!B^1#3?q}>@iBn2V_-$sS21ELt88_{ z9Cl1N!ED2NZrAEx4&VP(&&2-|ayBwq{EtqPUxG1$RyNHnss1OsC?x3|T!WReT9OeKy^iSEfGRQhH8(e$M%ZpZoM0AKND@P>L| 
znqdc!dpX6j=F{B3_pr;FVC%pf4ns_Wal5xanW=nnRwQ||^)T@i!x}#lo_;Ul#>i8- zq%7*8G)~LoSMipm<#>9;6A|v7yRk`}CR;|Rw{U6Wz)@1W>tp|>im!;vDXu#-IfFT( zY<+pL)7Ho36EOjrAd(3KGY*7=WSGE1)cS#iZLh}QpD>eAOx3M`U({GO@#aCPSnbh< z_oxsV!T9E&!;u%0!J4S`m>PCfj}(TV+nd4u>>@-G%xq=TkgZuc#-JM zWP~rjI=$z1BawUsnUzz@K=8N*(14$SqCVkkRFY(dKiyxjX{t-?{aqqjg@t~bLPk(` z%X>T$!w5V;>~U$YDW}!uarmuW$6AoVZd1Wf8(=^juD9p~c66PpELu+0vAM z0^igAIAA;HOH81ej|QQTu(6UDw}xYVn%hX&v1IwE5C1f84?Nxdmp!;TtKGVtkGWzJ zffC*neGH8FAnsK7h1P4&CGYPjjQx;zJx*tpZI$ACpB7+RCt3t9ABva`;!#L~NQe3b;6eW$mkA#Xo|Z6J?RgwVyeMMe$42{N=yNq$ zX5`c6Nct&nqoJ>A+CIlj(i4VKemsqr7o9=TE|mVKC^CiqtrU8fM+R5)aTmIY>wHKfTC%JXWu3o@{@;5g<5r-;mLgb*_)jX=43CbK$V8 z-in1YaPY$}7>0>aZS{-`LF(BVLO({C?%mZ6d(|s$ktHcne~j@z87Ue2zsmGdZMsV{ zW2qqattML%LU0=}JUVJpr%;&EMN*prxBHb+O|ft_EFcyi9-RtbSB(KHne&0 zbz0$)6ZwyMw0Bqy40;By5T-~W?rrQhTe(?w_pI($;~U*50hVF|qv87OUK^yak2QPP zyRu&(`QNnjWR(m6jLX4&Ff)mKt;-T#R{88p0W$9|UVG}P6}@UMgpD6B&G&pmP4_%H z6nu!Kfe`i!9U|G37o1O}ZCZnILPs6At@I?mEH-A7o2_il0WYdRt6(ZDLGi1b+lN8z z1J*S%{LSwa0xsW+8O&!ZC&^wT@!RDkC_gj&^)p{BT_hKJGs>u+I-Pd1IV#1+t#24a zj2i=S?8ngg21LR;RbSLPSd-iPb6I^jXDxT8if26$pYMY})-5uIybuAAvGs zUU10VnEo&B;Z+r~-eWFI~H+o8sPVRl^a}c2st8IkQ(BQ8x1Htz9BUbsLe|~$c)Mq#YTYgwNj4n z3X7b7!XgGpWKB1CVqDw2yP`OZWMCOQgBEJ8^dK3yVDN|4msBCoNQFl)n9e#N&ou_* z;$PlMFfm&3c7E+YsQD33ZrCE{&6=!PdmvTSO-9);fyJciO}x9_J8nnH(3ZgV2F|zj z|6=bgyW;5DHsRoIK>|So!QFzpySoN=cXto&ZjHMJcL+2RAVBb7A-FpYZ{@nL`GnT=2AqFX%v|&M9z7tQ{k`GqfWR{XgRly>g@E0X z-gcT_p!#_`c3E0iPW{Mgc#MnJzgrJBbX?2GzdFyh@8wVqe9G;rZ8-+i=Ci$Mv3UNV z)E@Qe5ZyZOiTSsz$O!BfesxE%o1a=p(}jNFJ8Dlgbi<6`hdK>Kd}(d^JPm)>4}`R| z;KIjXcmx+Q9lQ41F4%E8wen$JNl#j{vKQr(GcUSkagEnn7(% z&XAg%L?QL(ky?$fsL*LB&>T?et>|u9`yeJD!7wOb%2ze8_TQtR$`_@TYz7*Y)P8U1 zEkr$!Wk+dKGwr%mXgv-`Qa@cwGWry0pyQ?35Eiz_C;#@lCXl(nN{tILr3s;2a1cU9 z3JOw+m|P~e`+1RzK-6gr>M!6L*6{g?4u<9JuRyferlH3{6D#cgVBndCLy>>+B_0|N z!4eO9HTcMO9t zB1?R>UnKFRYLXDVc1RjM6NWlS^nNu8ybNJHb*O#p`kaUWkispviepnM2yvP!ZlRcx z6Wj>+onO1E5Kf(a`KOQu509q;BZEK19lp)%(C3dgf=xz%UADh_jca<_mmLZM?2V&j 
zpnqC(S$$XT`2huor&o;u7-*zO7#jkOJ_k&v(wd)lnywTg@P7%B!S|wXUgYjh*BAJP z&d~|!=q^Be+KO+Z$b`YjXR!a%Pv92mPco-mA-3BQKhZ(_pO?M<605rHdGY?sIeH5Z zu#%8U@^s|>JtnICFWYIg%=Et;DYDN1`!ShiC9CycH=5x8cas16#^hWMe<6;vRX&5M z_D6-L0^kWk1z5B+*)BOem_N7xXlkYgT&IV(mZ9$+ZY4z2pu_#CZGrPDVX;_>S)mXcmPbv!A(&`XQ>i+1~l%k+Xsub(%_cd2eJuwTjrz^$|fN+3Re>0b}BxFFru zD%G#bA9$S33+>jv%4pUZ3U>8;cX(5&Ha`n<0D;5F=Y zad(#iWQxhQ*X5b-)q_h_hTt_p{I0)ij)k0rNh(+p`8!;}XVqHLKuCGb;9v1PkZ@$~ zKK)#x(_9wfCy;b~FeeAbn{p!%h>mO^Ug=|Mwp6_1$R0Y6hCG{H3_dhj4e-+%RHXt2 zAd>GF>+1s(Uaqlaa=9BRPj|)=0MI@Z!2X@VJW_ksYZXN}-wKpO#l)_Sf}tPGx5Jz- z=My$%vN^07E_%F`OC;TCRl2`&rI_jd?em`(d=Li95?M_EwZ=pSz#IeA(IAZ+kH4<2 z26WnJRFxtwzpt%TK|Ttx;pXFW1#p28!X3dHP+L1qrAh_UrhDIX{72(n)G^LmjVh=S zFo>IA?H^UKAN&r@csx&Y^4_X$3&8q>wAIOo!bLn-s(B(&Q%)K z*U1-rl6Z|s+i6ykI^4N19s9;*#E2(k?i>|^O30?IjMvP6r>~M{A>2Q2j-{`Z%~6T^ zo?AFT=dnZ{pG(qap~gBM@4%}@l_8tUo^H9xV&UNCV152AJYM3Gp*N5MkX=;oe(+o; z9PCP^9Y4sXwRC!>LrWJG3Swj^cRT*RG0|yKSI~U<6MT2MN8e<+&s2+(T|bn)&}E77 zaM9&x4!Bg#6??M}B&XD>S3Y<9vAKjjckfCOuz3}xGUyC`idt|qGBP6IG0F6M9G4D; zi~gqxjg$p2M!CN~j>Nu80q`UgKMFe_11v|i+l;`w&nOTK0=gH{KoH&a8O1A6+7~(| zv-QWDCclUJwghR)ii&-ad;Z6H8dfGqt&RKp@klIg%1rr8l&TCM7Nh=vmfxO@l;Ko~MQ_P;jWY`=A#fcq9YnIs@CM_qDIn-h@=qS$1SwE^2i_3ZwR^w!` zU8-K;`Ot#DjynEbI;AWAA&Y}eosf}bKX5s&H_^$Iw{Io`(F)qr2Ays?D=oewm#gbA zsObvtxta)xyrncW>vYX=-EVU{bU%ljP1dbrX<}EuK@U);F)~Cu4%EiMg@uWQLwP4x zBDokp^7~^*Xy{%<6K@vW1SSqlv)jp{*cm2Uf$KiLARUk~vnT?SO#M#CNQhSRm}fkb zHMH%`mS~5I`CdtIC}RxrbghQ_Kzu3SuVrlxCySfjQMOD|qE@bu;bFfjU6;$VcOQ)% zMF-IEttiV>M=s}DBG#(oT5Bi;C^fbT~RhCv(U z`qS8nC5qSuch5j$GQY+Ev95ucT%S(%QHM9w)IiFSgZs@y8oiI6W05GFfU0H;4%0`y zm*;sp#%FGyq|A&0^CBXn1V{8n-=geeYs<6Wu00jh{-|S?}xY#K0$s z7~$3{tKi8j962P(+R$W4y3UA z-0KX9uy1<)hntR+F8rf>5eYBrjy@hlWY@!qk_x3we~!TF$3OYGc<1NR(qp=GT@CW> zN23it&2lWJa38Ko;MuF0?s^P92e`noI1E67pbih-s+u~c*IBRs3KEx8{BinMrx4D} zJirnbXk<}DCJ;612d2Z6ayrl9*c-@Xv4QATaqU%PEXT08d+44XuCR!v^MvXrvLtmB zYB~l3;bNk((9QW=c1bHWo8x`P|U|F(!gQL}aRt6+GO8yDnAgP=jD)fN+Lz;FvINNiDAxq%%r 
zuy7n?)9XN#v*{0XbjUO`ncbV$?XN}iRwhK7$D;5Kd=pg96?6G%=gSov@6+*v8?J8- ztC{~|n*bqhlH2}nwT<5cMt~=aT8XKaf1vSQAVS*AB#Qf?w#0bmhfre#+(Wnl@NP1t z993FDy%{e0O=tm1jZK8zYOh!i!tc!P2Tke$U#$jk)PJ}X8khwJJ>{f-gf1KGJK}C7-K>frT+wH=qB__$WZ*RrEd;=h$Xo z)$K$E2t2t4A2y8OcNv^7E{zxkP-@3GfAsRTn6 zz2nY*CdYm1Cq0a`AL~iIJa1l^a(#XAC$_u(={(n_KtDhje&g_=Rm}Ex+)QgYtUWwn zeb?f5bHn8`hMj&@IN4w6_2-uen+VT(@w%P)*mNG7a$1jPJ&9_Urot-t-={PFjlbr$ zbPBWXa~esDkD5e$NfRo6hH1^b9d&Kh!k{0&41(7{ZZAm-6hkN4rQ@ z6_Ct`0?JhNV0pvkc$-|KTAxA6=Ju5#^G>3)Vtw-nw0Q#!f|2{!_i~%#SGa++BhM1V z40(8$b@8CSSKg$d+hRj;?r!~bSRI8zMs2JnY$8eecV5&$xxU#&9Y5ba*1>lBd0l6#+Q1DBz2u9z4q(1bY22a07vU(# z?|G7LXEN9=!FaNzgkSyUdwXiDVuks&e#JxZa%U^Vv#V4wPnW48Pu=zRbXwz1L7BPC z_4ok)0-o|l8C~R7=goT?`eb|jmw(ooFwuT$vEOzH*s|t3asn4ry7!ELU;9uA`3{6g z#0F`2<4Koa=9?FG-366_r86M9XUJeSXj;MAVc@SL2w(xWP0h$LSITpB23;2J8|$QA z6pS7U-gNO3J2JfAo;qD(&{%txMm`CqkF!}!(!ArpK5A5m?EA~z`qd}m&I0Ue- zCB0C{R`i2ujx{p=bb<1X#JQ&TLy@PF+|RuEN51Q60;I0$ms2BEt%?$B;phJ{&ACPY zQorgmg5Q=ie56J0WsFvn?8X=(IRdP#Fr?^gh2jC9qutx0thX*Zg*`OVykX@CswOSNQ0lBngHw0`6o<=zj4)?Q2X4~NBq$Lk>c3G64BCM*bvaL z1R@}PTOWlTF|duHjoD)%XG+a4-1muf6vpdtHLKgB!}uS?%r<VtM9C63M!lOqlD&->AfEjV##`l|b69nScKp{1;P0EZ z(Sy0`YySFP4vaj~zApb!R<(OCx)#Qil ze`22hijXM&TIvi}#>#FNJ|}Z?Zx@V3tUXJKIuIu%+6ID4Y`wsqZeJyb1=gH(b!2jz z3O`Qi@5q>bZL5JrxR*`RmjPMTnLT2BeHC;zdOZ=k5O&ybe^ELi&)#pGzS=mTJQQGv zBv-sh`S)v@LvyM zJe0PYYC1`g;mp>Vrc1&QNmb)v3AH9(d-3praC{rnm`(w1_x+TRy$;+$M>KuXoWf)4 z6qy*GnM*}Qp?Tpj8R{T;ysLN3l_Y_QsBXJW8ua= zmcd*vpU*7<4Fh8U=l{xj0EmYTHx{$N!b{j;KKsN7kZ!oOvVKVCRr#L#i(YV zipR{{rQ1I(Rj3%q;xIc@ww*YF!>JKA9K=?ycjnIkL#L53=KdXW3iR!l49e-pnxA^r z&Gu1omDqCSqX*Xq_K~NR=J_aW7f;}1=Gea6o@eQTnQW$LB7r;Qx(#Fvuz1!-y~mHd z(@l)+yW6dps~>9vN>J|<103C%dS8KziQE?dJ&w%p2~O)l?kpbvZ1qY*HNY~gEJe5J z+H#=agENRk(t8aOA+ZM@LD~Izfj!scu2)<5f$1g3`!(PUoGOEDwqIuecwkz4rnvFx z`}MZlt!|#kvKQh)b$_Ce$xhbX*N61Shz!LH_!P%~mEM_6%d0CM0UtRpHf?8+XUG*Y zR)JDK24JOU4EJIU)YF2@jt071PGzHBhFEcMqp;weF_+T5s=;end+qQ}R*(i1wmIg& zmMux3{EjZzRP)DX$Iq+CM6TmU?^ zTh=Gl3g}Iy{F_3HeED+l^jbM!+nYb2%f!o#|H-VO#d=-#{illh9fn*XEt{pPO0mAR 
zx>}~qxS4X@Nl3KDL*uY9pPp1$WeWAgQGM?=S$Wj_Z?@??cDa3f!Xnwd{k@bVirsxm zAsakOIbFWmg?cTm2H4;h%+n>K^oeV0wHln1#uA~l-wHXhZ8bWa<$^s0(1oX`>B?T@uDB2n=F5EC^;$*CY98j73?~LyKsYs*pTl}1-J>rqY;tyF+fzP;s_~P zZ6w!Re}AouA7J;s+`aUPW(5x8M}5mL z+fs>WB;uZ?qwjsGG$^jrJwD*Q;R^ifk;@s$I_935#Aoh=vy`)6Z_q_W%-8Y`^*F-K z3&L3oo63gPJ!R-4!-5$e7ACFADOa!A8Y7?2VmOh?1hVYasxxfNRU#l_0OEC%0Lob! z5Q8}Za7z}(_GgT~>+~CK0b(|4jcWCDTzai>2+(6DW6{Jwlq5Lz7sKlBUoV2#HaS4x zMG#;qP1Z7JcK1B&&44hXo1pWXj99k^F;TG_vU|xfbj|UB!SDKvC`5uT!-w4u2o@#E zrGHU4Z~#ODV9HSektKAY2>xbBi~nxVYfB-2cjw}O!#J4V|A92M4d}psJ0_TKEpB%h zf~Pg=&8_R(ifjxRk~Ej$rfWYFXIbsAZKq6dHkCYzCrpn3($>Gx0~(_}*{$GkDBvkb zO`=Pq2WM}-CI$uVLW54id0X8xDPmqn6>@GaI&Y5L^}-^h4Cln(khY3PLv<*f8A8h~ zFV?P>e!290d<2JU_7Ym5&_s#Bqoo49_VH+JYS28N{tQ$ z*O>x7J^D$_B!g!fmEx~<8{H!gJw*-b73vc37{nGbdwm{`MVrE}!2!dVYbal(R;%}y zF|Wn`Bz$KUnLKd}i=Jq&{uF0ZfRolVESdYUGNv<+5hqPWR%O|Hx7V-r_kru7%zFn* zJSk{FV#v01FvG~Xw+UTiZj@glh3T+y5aamM0-m{62n%8? ztuy_V)#sGd4Y|z$Y>*B8?evF(Jj7{{0r^*jWjdP}EPApsFCK&TM{1=!wh+n32+AU# zDPCQq2a^rIAih)MH!J`pr$Ok{5l*8UgD$< zo1SwBDa`X1!wNkkz;C%^M^V+jL5YX8??F;3i}%FU9~}_-Z6wD#_SOdOJWt!}m1=N?lWt#oL`&sJ<*yMFln%rk^d4o_7nq8ci-A+!lVRU~1j;tIF*wp8Ztd zNm3mwrUQayQ``C67iR|mspb5{g-3U99|?`bDlx52aoFk&Iy&;B(uN+rX3bo$jB?b4 z-H#eWgFE)G)X2CDGyrm%3@FOzK-|LLnZ4CsW;Wb|v{c+?Q9guoh@SP_ZHS9t;owN9 zV2VrdM&TStGm}feVZmVy8{r%vLaU}yDfraS`7(ciN`CVyfc?AiZ11K!EW_#Sj?&T7 z<1lMIkNs{V%l${>g*UZ#Q$A9fmTX^(qWQ@Xy|}P?8-4LxIed3f%d77fDRYh!rRqB1 z&c}z*-n9^D)yw|fdx=QyD3n=i#;fa5SJYoXaxLhc_bo={xXlUx6V9%A_syel+ElPq zjD3a=!W*V({yoW=&+D(>Dr=rC;F+IYS^k+3^@ZhX%&t$pGTGy5xqk)wWIt$mr5A#r zu8WC_Xi=Gy6=vpV)~4NI)0z$H`OFgAt@7&56j&E6uT9^8yyQGyAU6eXfLTzrP*puZ z&@5Wf*4K>h)dz7P7omr-lx4*Kp3da^ymxZK=h0C1yNd1U5BFj~wPvNL?P86FxcE+3 z20sV3xp$cZjzX9lew6O5-STzid>?>gP#MFQYci7A3n@KmSOb3xP>GR4=2Bm#V z9(k1Is(=95=r~B?evBt8Et+NG+;4U;vROT0L-{9-7GIRxx|;mLAN+;9C)7cdK@rmg zjSCKX)+t9l;dcld6qK2}qAUc1zVK&+J*r0|*)!F5N%`6pQ6%^K6&O2LikHGSZ#iRy zUsHdSIREC}c|Y2KWnYV(CSLmsn`Y9NlOE&DR3Y6!x^5$`DDHFRkEUJn4j3XEElh+% 
zcEmYeoF4;nuoy1uyv*|rTLttXmv>bfEiQX>??0JghKGmKlDpoYCvm?|kp6V>m2O!l zFP@*;@9E1zsrS|I#`kqFwNqv#B~urR@6k%3qO-UHp+s`nww`3vd-Zuhy7~5zRw6g3 zYKwrqv3sL)fE*69B7h82fFFhF!`J|`aLKWFQmq4)pPL<5+I*3Z65`^?N_04EQY@^zBk{v5?4HLzB(fMq^w0eU z%gx7=LDT)aszkp2UP%Rc_uVP_Ro}2OsT8e>wuVqJ`on~ zZj%0p?f3-5cj+57nFlC=@w}ZhzBGeAyb5eUXi*$*R6u1bmw^Q(->O$4UsluRtSc45 z>lJG}8VVr+8dIQWmsF zLdfObRJAmUq5iF=fqpNM-WYi}$~@KWas!p+Ox});iO*oo@}E~Ic3eM_mcaPdmg$H* z%c~)C%}CFqS$4(b%*?P4UHjVe=^sqjEGZ}=4FK(s{5(#%sv>NEs#>EcTyy3h41vs` z8hn3`NphciK=z}*3XPy`tJ)wel0tyKgvA_oX9%S^JrPgLSBd;YxBQk{!>H3MI^bnb zOBNW^mH0cv4QjGJmhvs)W|uvio0s5EZ&#|O3*4IFQdYNaAJ1jFQ`Q`!a*#^u&Okv7 z3ksFxui+Qed2^-LIWF-Lt&@^`@%>UGw_gE!yz0z8r0$9|)~!Z%Ltl--4O&b$8|?;+ zDT3q#-6gPa2isFt^4{;APeh+G>eHC7?FMtAnURDkXvG2x6Jn( zUSbcX{kf(}%44CCU*lU1Ibz}a-qs)^{SLMPb`BFb#`;@f-<7u-q_fqnq)V(i$!_SoQAzQvclO6+Op$ zus|~U%#x{PQV@=I%x5l_YadyA%}X?lf<7)ea7btiU9MNsua4Y@qLseOtlOO#)3`X{ zvWIdR-iDD9K5scwWs(fcC8JvcA$Lgfjb44i=e->PJMcGPn!XQFQr>x&36sgxjDS;Bo zFNx~Hs8|Mf$IvWVs!~QSmk;F-(CS5ug`&=p)lxW@p~qjI@49oF46ZfAKyX|7iNdm& zG#$80P8Sd2`pBs8|Ino}JL;LHroj-&ZE~=H9UuaAgeo&=4f;^!Flby9`F{w^(8Ki>eAsgXJx5AO0=Wqe- zCect#1#Fyc(vZ@XMH62o^6i!KuLTw?wqX%|Ycl3sUGD10hM&s5j49$EFrLo6imrFye*cI@n8yC_Cy64nMP1D|uOH{Lf}!69TiB~Sd=$=+9znR);4SSdhP z2nzlc{-aikxk_b+XYX%TA~_k5 zvufn`yhHck&KFQSnSA;jyI5BG_2u18xWB50!=@-c9|)}4@d!XpMz&ExQLHx>ruXSriw>!Hoa9HmW1YwNXg zbcuY;H*M}frdqwOm@0uQ)K7Bc82#0h|E~Sp& zHx^qTqxXZGkVoO^CS7PKs2wrcS50!sV!yq`?@zmT$p!RT@G zQJS>It2C@}u(8ceLM?c$tBqyip6w32KrE)L7fJ|fn!UxEq1n@BWfn8>sDc@4ZqA31 z;ZL}rSZfFRpoR#|L^T;#czY<amnY^wXi5X-J z`%oWTzP-{CG{d)X-#%F|8{w)!1XUMX_BU30*{yT%uk&{Y`HnvXuq+h=Tkb!iwrs|C<=hRwh-LDt zCGV0WB0`4*-BJ9)&dd{y_1r$W1OKdL(8qN+d5t)g%55ZMrZs4}Sun4sqJP7!P-%#h zSZh&i;(xa$+u7l3R-FF8@h)QO_b(J+xp>d%VBv z#wgxvjZ7z8>_}4dM=z50am6sEvn^+cWNI=6Nna+tru14)?4SS=e4c}UNp?tOV6c>Z zPM^$;Xf~ZHbAlS1Mwm`jF?&{7KSHAcGWy5RFpNu4ZWnwDKY4FwM~#Ao8n z-MW{I?C){HZzafN5^;?Qo&E46h;jy`Zi(&Jk0NpCfaINjKl|@-8{t7q4(*O;Wvh|p zboKj*CZt!Eq0rpO9OS3T89!Sv&Ne~kkj=Tg<^&-V%UU}mY*<(*5n)VBSlD3d(4eJy 
z_$K&RrNc=a{jZ#XQX-CxC&<>`M0cm*fq!m`NdLSLUnTVD(5nsp_f1eDWS_Ab_Kb5r zj>kkyaMjR+<5-LJejag4RO_4Nx>dU$km*jCfGXdX`}m(v`TJqgUa-lOI?>^b;g7t{ zN={6&xVb5+aC_wLlYNPqQEG0lx3o8eu1_2}N%v>dl)cbsu~GjSgCO9S7Yg)vW{nq6 zR$=_FGe%)Rw|$dM)BRx&&61EMH~ z1r%m}C3D$~PbeuV)UiRey_aUS{E#)@CWmCLODQrLMgzsV_1O}wvyTfc&~TU7ZKLmG zHV}VR4~@j&Q9>4WcGp)**E@&*(0iYz+zKTBuw^zILGf|=Rv7FCsDB?1_N`L^L38%~ zy4T@q;2TcABgQJ-R%8f}Ep)x`v;F{Jl3dH|E2;y;sg)*kddTIG8?WyUVYn*kXMz5no=20VVpgViBt6zo@x-wF zgnjdg`SmKHK)X%)^`%Q^%Y~j_Kdo|}cb=rn-0QLf?h^&Umh+|777$L9VJ=U zEOqnupAfWnC$iMi9!9iGe+X376CRnPy(6BvY*#J{`O#}nNT>f5^X^JSx7!SGQq=UtFe9pL1wBqmc-^MYbb9QY~)&m1VVn*#2~Z)=4g(qhKJhpa)ct!&$li zQ}f~pZj}mzCni3==-_#cLHj;(a9G9Y!3b>%M1M6%~~-w{VnqWosQSv&ljks#el8?vGzX zSGX`>XkZ1|c18n0@L7QJf#yn0>{iI@>dTV##xoop^RQSRokoo`^Fp0O^->*3A@^l> zTx~anjS^552lvJK1*J3UO;goC!rnQL#u5BU*xYAdMpk3qlYe#|OyKh?{q{Y(^_5Ic zpi|&lp=X@X$&&%MgdtbI_-wxoc`@d7-b!QWc&pb}OZ<7*_1K z9ZqGAZibKVU^py}06;?Xqx;gdL5uC317}n(*c;e#rzv>4e`S8r5O3I8b!3fZl5+hm zNHrSoG;2-(G@NFXz_0q^8O%mo1706MN(J%o=zpXDvh!`G)(A^9`wsbiz(d&Gr#^TV4zh1!vL%07~E7K_#2n{Blob|>!EKs;6s)g zTX2+Cy?EYLgxG4o9^-MgEGFfhY+7G$Y#9FV`+SR{C8=QRaG?Oil*i}ta`b1dFpc5EM87A*Tif(aiISR`xB}ZH}sh z5rC4;TrI1Y=N{1-v(dyAQ>f5-^zSr$&VL2VKArsz%*u@`l8Aj(*Ntj_-z`($rN{9aqy;0mmQ zE0hbCp4ZQ@KQZ10DDekzmDcy-O9;I_$IiXsv|ft>OflqJkGcm}inYHWa`1MwITitZ zgg-Ug&mrG_?eSWjE3b4p7IXp$d*my$#{!&saQv|V^j%9Js-v2S}Q}U(XMj#J+32V0flHK|TXphlRTC89Fo+jr$e(7){ z;F;%2H$Pix%_%M^u9Pc}zkUh{FMFB0h+)rS8t1k>nQh#mt_a)|jA;q63aa6Wjh)8s zWA0PeTXzJ;oYUI*#az|?AxTb3+XtD?a}{dRaidcLZNtILX6!ek-k%hvyg%Uj(W)oc z>k2Hln-zDT+Ls(0h_q>ID)pQBa!7*AI31_R_n$#H?N$qtdE5CbZQek#;Qc2?qlOK9fUuPpAQmilygSRv=Gc+1H_Pw)JK$hb<9Cge({f z6JZq={^$C!mXNA1g+Q+~s!k7;CT{$WLe7iEF?O>>$#ahjD7u1-h>8rAW)GaMGRvYC z0WXD(3%d<^ylOG3blH=oi+^eNx_HZ8Ts%|@(KLU{mSefV(r8RJKsqT#rT~&nQsX*T{=mJn`CwB-5bP8M55gu4X| zXy$D%+gjjGt@Hk2vzW@N##nG%pwsvvPY@dIz@>hF)Nx`C&g3iNJtS<7dw|_F9sV{O zQ(?S_hOowv#a>OoEw7JxO%2GDE69$Y!%@^*b=@V@olfO9$@1x&fF)N!9PS{!+2YOL z^tziPWG1?Sl0dTG1Z-x79`<_lDKsG>>B7@rwh`rKKJWXzzCS`xML)@ywfkgyOSHh8 
zEj<*+dk{HJ1vnQ~-!F$fEL`g3zCJ;l-iAH#`?lFxd`_nZQkYa{NGFvZH-^uE3ZTO4 z!ekdb6OJb}^Ch0TFQh+3^w-7uW2p_4Kq^Va zz$O1h@hF}Ov?l_OpHu*zHD|-oJ3DdlQgaTVFVbvT&hp7f;r^DedI{)tmkc&;TdY57 zU&SGNU1R*FxnKU7qII}f^r_|%9x7;vT2twBTV9BQxMYXZ!1Fv3Rua}Bn>6}IL3iQD zZ&5#=olt29m?2H_ut$9_x^_WZeT&csWK#YmI|oJ;cU=KcT;Bv+qlOT$ zwkWm@FZVX4ZD@KC<&v(`SCVWUBQuS%2@gJ@;Y$-Q`z^OCIqrQE$mc|g&E&QfLLc@F z15r1K@tn56nDL^TL6T_SK4{0phwe|SIyq(-lTw!SzfRJS@e9H?eeNzxkV~f#0e)IM zmpq8gZgW!nzr+&@)eUZ6myfubrYkD~g>z6)VjA@uA5lD=3SlTtKK68|+Cf0Ks*ag^ z&(?rl0qTdk1gMf0>>^<9w&|_!$RYF9N=EajtN2!Ldx@qSl0PLf~Ay!IB*Ni7YC z38@L9?suFunBhNN{s|_8=#)x-2A7t(T0V`Y_KP9Z{W4*Ady>s>P6tU)h};9GnZK5; zTu&{TBED~F+#yN%3`_^md_TZ>9{s2h`%t|S=QmAMQ84pLl8fXzvQz-yrao7#~<`8uq|Q86f^Ro)J{jnED!b z@k{#X8lkn-*0L3WNp+AYyaY#1F{gmv9fxVgD@R_gsOR#30$;FItQprK4Akp27$X z4NbE?d&5`Rr{2}3LzMX-o6b;_uhxu$u2^$}J2?gg1qC}_ez7rZ1BB4dja0c0ejyXM~eJ-<^BTWkLsTfmyOq09xyhlc_^ z-CorFi&G|ZAu{R1gQcdWe^k9Wb-m~ST?|H!f1xz;oZ`c0a^}4+fEe;3_rbj#z3p<; zu%HK3D)|6Y(#2nixq8`d)`DD4LOQ*%C ze5TOVa)y@L=9+R+q5$JHPVl9oXAYUH2`HS?4@$5zb6L(!+_B-6wT5x^;~ut2vRNVT zB;L$ga3h&Yfa8g~;%4t(8UqV_|5=BOC>R{as)%b)s|H_;SyDvhD*QDi1EV#(zj!U^ z``$1Z=Y+^?Dk%#}pR+ql0K?qI83gkL)Bq}S9&BSh4{OZ}R@0LQKZ^YMw3@V>YX$E1 zyxi9w12wBD7nm$*kYJ(SVPZCILu<=?pgV?q@y4Cb&s#m%F9;}AD_2sh{&XV$n>Z6L zo5Q~VNgR_gS14n#)<9w*-#bFrg{b>iUXqHu{Ieg#YIg9Ud$*%kHbety-RF9{M=xKk zE1$`zQ=8G$VW66~&L}~HON?>7##^pc3495Xv$`6+vH8vAJcr>CJJPhk+)E@ThJaBT z`s6XcEreHrEBub}S?$>3@=tM>dqWzRMNL1pZt8dn)m*%Nj<@ILUBzqvK=;_xro}kX z%@-XrL4unRt?urpsKJNtF9J4j2)C=jf3i4;QgZ^n$8pksXz|%YUilhO`=0xK-&4Ns zXx!K7YqV`0JEKT>VjpqR9TZt8E0pCCyYObS<8i70OxiD4qTOHTEjfVWL zFl6XkEa@a3J;jIT2LHI^VU5Y`bGEA2-+B`dn9V$ypO1U4jpKu}?^jxF9a^!HoFwR{>ZcjKZFPIsqyDq%g0eR#@rO3;iphKf7w~$s`vG$90L10 z4o85F!GdK!?1?LqK*tKlGw=HXZJwKaiN4k2c9B7OV=P-JjD+ahrk-)?lf6SnR9oZ zDiR;>ML+U{cCkCh?FqAkTGd9Kz@~{}zOYyqc?X1GKM~11+D*1RK~KDQ#H4-r$|ADF z(G&~<(Rq9)!Vuzx*=6qdhJJTne^B0W-uRdy5qRG)Dn_7XH@h4YlRdFKG1c#XLuy1Y z6HjijVHbyIaGC|}{Chpm1QRl)-}b?cw~HST&fL@*AMh|xQj7ExVl}j`_ukidnL5hk z{r3dH<(N=|EuIwAGt%izcAHbh=xBL5mCn*na?2mAB5}g 
zOCKp=!)iW)Hj+D$|8oo?w9*G?QO*28#SAI{lKXoMBD4$=0KLdmwwKoZ`vQQDMFDGT zdBZ$t{J$>=tBMMLlT|%rtgQa;HJsy+lbP+&9=rXYa9Pr6%4uk*$O;uTlt%vx!SaGe z6qj7{fd;*$#MkjwMe`x2kuCb~R}LaeG)9afpcyDki-Y?Yu7eWk#|DP^0yZW&^1tH% zj59tksBckq$F=`LdjC0;KR756SE-Q3``_F17fcNKKxRhy|4#S+=loC%QC@%(o%^t4Dckk9uw$gA)s@rpU#+?z1 zN3WK@!yesXOl5byf zdZn82-(hyezGG6ZTl20^dJUGBw@?90jTOd81^uKWF-S^N;Gr$61(}yBti*>$*_?CjRj`J|tBcven=e{V!xL{P&A|m%;xzrt}>A!sn6+ICR%VAy@6~?@NhA zBowL`eF4GO_n6EzFg&>;Z7IbHrR0RvhzR@1bqt4I7X}n$hp0W?fB)&&3u0RDR5M*{ z@QOA2mKc1wJ9X4}_`|D+>gISpV>FIL;x&hNB-IhXRSq(AnhyX=k;>Ve8wKEYqOlO~ z^v~b0g7p>u{eRRK1+I!|AdMx7kkeX}jL3k`g9QsW74~G}{6pv;x*TR>!|$rin)dH6 zMzjubbmIbG5UO`X#3JX1$AywHu;1!k{(AA(?egKorO}R3E%RH0^!E13!d&bvZC3CL z+AU91dBCpZw2`_O(!S`?^# z+Lg`QO-~Z=ERR4tN1KWTZXl2xUKSRvewWi0KN=x-5Hc>4Xj--U!fd6~nNp?74%OkB zp}#y*E;k|yirUGzQDLEu-L?6C-TKs^Q!;ZP)I$C1lRLco@mw0GNXO1_(wl!Kx_n7k z@#bx~9>W?hq?OAvs!rz}$=sE-PWM=w1+PU0e%m%X^&%5WT#w_QDl3!LK%jsep^lo% z&4hV~&QN-gcF->(Uaue$ey_sf4|eC4LzQnaHsszhm+7@TsQTZQR6N-T*jxmt#Y9CZ zU$B`?R!9S6F0F(5JuptkpY{5__hx+rgmU=|;HhLzLWLLW_a`fjd3cb<@0U%yubw;b z6RAASbEuFW6P|%dIOjG)x~457n@2Awa2|Cc!ipJBS-UtEwl2G1_9LwgDsnNZV7X;lVs}ICK`w9%J)FKV5U*S3Z_w76V z4w6|ri2hmg&VK;Y@CZt0w|4Z~>#GEzPK^|Z@13f=8%L|nYQBP4tU?)ip3Y+&fmixV zG%p%5a@oG^Vkw}hQhOEbal=`(=^hPWb+5E8X-{KDWqQ5^GjsS3h;j~*UMwr)w!apB zaDzO$+uS;s?!koiF9g0y?06>{yw2#Z^x9X)l)su!=Y~g5%uH0aGG`#v#&ZVCR0|J; zq=x$oTFm5rj@h>Bkw+pS?w`fgr9gc=jjDb8(>;-1Ej*Ro{-eBX^BE2~Gu}WTXj3IY ziFZE6Tux(dGvFaBAdg4N+Rw-1WZTB$tDhdk1sD!6T87)=UOsZb5cARc(j^v0Phc!oKdfww0MR5t_Ei=WP!oe zVp^Tuk65$K56mKz(orz7Kuu=}q%>U0 ziT`uy<-~uRLHFoEwD43}uy3+n%EbEv8^<=ZO0Z%c>+A1zfs-oHWwLu za&Ad!HxW~P4|iqg)JqZVSDTg@&=nQ)xg}Y2`VrtQ-Y34XmCMJ{{A#7rd@Z)j%8mOp zV5i+a%~%{VnOnK!PrvfMR)tozl*WFwb3~olO1^;{dG2&4ewV2uQ{alrFk4_CwO-Jx z_jfFZ$Ft|-56~t>Bzo4QJ@1F~fT0iE+#iQe-?|eU1^iPcNY^3q-!{nCG0H=qtcQKl(jg=%8PAVPVTU&8R-00`ae~l5*ARibQdbAupH+#Um$B+y52FZ80w77#!(Sps11ZW-~M-4jd2;S{yv6)kG7fA}blL*d0Mi}~CwPJmtmWKb8k+G;ja z9C7X;%ynxJ>JC+F;k`EYTPOgj&JsV#>IPQPtOuUGkF`5zaJV?J6Oq$Qw9rAadvYM1 
zRplcPZy2sXju$Sv*+94VwdRng)%_o4YqlJ&Uq``LO?zN9u*F%{W%l_A7e~aM74T^s z%qWI6nlSFi&q&04kECaRI_Gd^yjbG3kZ{dk@sDliNwnZey#hty`s2jkn+p%3tvv-8k)AJT$E(^8Gw}e2*%6 zt(f9@aC_p!`?!haa~Ap=hZvWb8pw*)szf+LVNcD_I9kwVzI=P5NZJ~Utp)whis!ZR zcjrwh5D$fGk>_~ow!?PWfrlFb9r8f-GQ?Iq|uygS_h%|xQ9gc5t$`@dvO zl$n43S&1tb`9b@O>*X#I@3(5RUXm+ZgFX+1aMQx)3I#bTIWg-)=6PC;Ds>yl%^p{& zK^qE2Tdn%yVE?CzJoiSWzi}>BS7i$P=XV-oy!jXJ%x@2GkszNe1;0GI#JRj}YokL9 zvs<1Mvc?%(p4hEbxU$%_>xtVtF#WZVqO~O41&t;F{&bT0QJfY{x!%Dbbx~@p_13ml zaB-UlnQ*PI-o~>%zvqBFr@Tz@MTCc6kI7)8-4dgA2Ula>Yc-I;TM*nUIRuZP{yBak z@&DuQEt~4t)-KTC?(Q1g-8HzoyIXK~3&A0{6WpBu!QBbL-3jjQx7p`CC;P+w0k>`y zRZt6h(LH=MXcT> zKl%yt5T}>L;+|y4PkUg37&AyqU<$A!Vc4Ck!Oj*(q0MvTKtw1y4$gs-uifmRHqLF2 z;h(^_a5503u{e_arM3wr-4=%Ta!klcl$eQXrU5Fo)^0ec;=aa{EU8-X?ScQXzqZrb zLiU<}E$q{qSn2i?b2fotbihdM-&&$MEP%f5Tfw*xpn;%fvoqHhi;p;6-PO8k=XKe; zl@pV^KCM_7Bu^f?R}nI#vOxe{EI<`)sxnZ$SPfQDhWT$%7bIj%5=TA^%SKI1P3;j} z--^<-|9OXAOsFF8cB9+ZO(0s;-{t8iDF-n1 zs4yi5ORqxVaMMc=maPb(a#R$*=+jRt1zj5ri3}LS*CTvg3XnqV~ zQMzrEDc(v*D)`?u=!$$;N_0tcQjS2Ue(?Ux%%|1)onm6LL@3v&+mt7+=|X4)UhC>Y z(^VS=>sxZ7%KdX;yx z%`wy4j8_M9t6lL*tn5~neW~7}iFSA;A|tHx)AMeM$!wFw#AQmf7&wRyG$0I80_;}~ z#yQuO?<|wY(jL4On`SgxqOBa-ol_JnXGc0u{JdfAJYVLIoU_$t3;}xsTF`1t|tqVGY09`&J!ueQ<-}CNhm0!ql2;G9O0v3Zy|eW)NS`rkg@YPB;=wlXuh`&&i~3; z#=T1j1=vE7dgisEKwLl-plPL|`nu*>3?r9znegbnmo*T=WC3B8XFEf}T%g!3qWWE= zzmF<{l$3P8FMRvCw)d#$^6~mWhOu3B(1ihN4y8or4&(55e#%WGA@q1MJzg}<6}y?a z@_E5W?^}Gf(+vAkwQ`Q*!QJu>&DGrQWj3$tlxui2n|#2QIUb0d_)JRkV=3^d&F3NF zxvstX^*mmZuh_tc`HOT4gUQ&G$)Ml!R=LtPgmeN0-I}NI)^z!NF`L7NA&q(kb{4<) z7pFgCV(s4k(g5pLm8-uMnHGmn?k+3okD!{c)szJ*O43n^!ZHx^0~^3a%udV+aOC=J z#)V!h)v*ZOQp+LBbNd-Pkkuz)JD+(;vf9 z3Np@)7i zP?Mzz#g^wAAQY&eQ&Y7j7Kd|{P+5Ec-ZDpJ1niX$6;jNr2{fwpzXMjF;u31<3h=i? zK9f;bt5&dg1HnJ+mTA9DRW4IToFDN3wO)kKy|BLDn{Nl=?ZVl#)4C^o4S71skMXYN zJD^1S>3NemT`8n1_y$Tt4-jhwOyxC5QAoWiaiCjgdlqML#--o6FP7(+t|v>ufHSxR z)0-x&egoieT&SV%nPku~c(B6##0hpB8c1R_>tsIK#^;MzDg6Z=2R*GW*}eD6S&19Y z_$S5o&YQXA2BTiDgb{c! 
zk(yPQ^hYGAX2e|4ty2Mwn4-s9l0Q;y1jL(2EzR97ETHL0AZ5m^# z_Xo3LnUt7-3&5gYw${a8oWb7CUD_`XkqeiuuiM2v^wX#R?arV>Sj@Lx;@e5|~NmqySwsqkFSA-e-$zJ&(r z$p9vTT7~+7uyxjYGJu2itYNe;a=lK;9Ixx4$UF1L^=PiPvs+Gu%l5(j^wU(49NqyC zFzE7U%*xegcLXDe;2Gl!;!D&Om0Sj=y*ha@V9z*Hq2;WXIlB%D0ecbf%p&aCZZj`W z5%XHXb0&&?%zS^m+$1sU*cBjL_KMABl9!J`&0-FX00!xDkGp_6#ubKuH}R>eqU;G! z93?J-l1st)FaSROLL?;LBl?UcaYn3wwIwgVX8jWd;7!8kYJrB-fLr3Oq<)qELfnHK zlEv>Vj>2e&*?+_JUoU_!AO6Ubt9riZ7><+ zfQ5!u#Epo+Fd6r^n8-FKt&uPbuT%U&Z$R4?C-B;ghBO&{0JL%&{NMb+xbnywkOd`o?972}(UOay7 zRm|@NnmTG(q>kB9{I9|v7`IW#1XNgQxjM8GfR^866b|b>M4L`&2G9es>W|nh!ij5S zs|6E-wMjwcw;fF$pDB{#B{YmI)6%;==1#l?1*3pG1oW6CDSl}zyMRF|xA{%*HW73> z=2%^$;YOg`iY^Xl+avL8=R#&Ta8j#C~&f=Oj4*{z+M3 z0^tcTFXz3n;FS{P>f^Ifew#fIKpX~o;c)k;|IJ~N?cJFSU@lr*u3<9%`4_)N)}%cY zB7V)-noEo@mH-z(6=b)94d`FC=gb56TU2PGDGEQQfY1MX`y%PPK_WN|$HT>XPNbjX za(5qw7H3LT_H+E_$~*wPX!FS$Jqd_0u&OpZ97>{f$49_E)XIh$C0uQF@-<;s251as z2BoF8F%=R3=ZbX}%awrp(%CiUTq=R$N=Pr^hwY+>#M@J>ln(rSI|uCw*X{A=DR?@qy01^3`0RF%0&>}W!|1#~u+I!2#ic-ifu?0c*&g&a=@~LsG&(D zGnE|t5eRr>QO2t~3AfK*jAeClg@k!Lw+c$}x7kNcWYc##oL@TX`!1|_2RR7hQnBY> z*j#b5B%JytYzS<#K=2U7na&J3cnyUFe^evw2;bG7_0QU996X&-p3_~EY1f-4rC&T< z{ow(qeH&?RUs=UG#PH!a0PFZdVC^V<>{@Z&pA^<^bwfZZsx`t+G-Ls(Wga=4EsrXe zCo(CDN~2_ho9x-J|8=^02oams#J||;UOfyBN{^&^!bD^_4fYk#k30c=*&r?E28r1+ zev1*zKluP)moXG_5VV=Ad9hUyxL`Vhl>(Sv4Nt_J%xpw?J=PVOc7r+#2Ce(&44M_I z713l7jG18XH*}dgAI-@Xy8bqr^9!7=%HqYan&Lv?S51e3cM$*mNiwWEDk!a^T)9Hi zajDj%6`&LGf1SMCl}SUE zXgRQgA}A<00yv&Oj;6EI7D}cmr#TruPHovf8O}jG%-ms2fu$r=h|y>_G*8IC^vFU+ z!>%C#o26Y;C1J?@8-416+jqocMvW@=qq%do9coAcn^KU`DbYt+- zMK21ahwf)*rSYf(ANLz zYp@cewHpDfl|16si+ofK85uRI+NMGbldaKHJ#;bPb3M5IFQqukr8cXL6qwF+Mtt=V zKg&$U0t^2~OKMDpXT1j*z*RVYIKnOuVp`Mz8LfoB)z(Y~kPWJsqmDj)AWH<~Ao{y| zf$&TVcrW~h-Q?*3F{b4JlV z{w4onYVoIs3E$K70Hvl0S91^(7g=h2G&XZ4KvFL9;jO{ygKIN3y546@3^rfyXxAg9 zp+Ym`-QGBoec?laV$@Czt|g zq^7k%l%p*Ho}&mpyfMIcR>$RwM)TVoikwxclB%FUDw6XCrH6X1-zcz2gk51st=^)= z9h%SGF(B{=BQX+i=3K+TWc#~q3F8Ri|+_-BWMt*(S 
zO)9WHG2E{cy-Mi?9;>rWR(XvmGEFswHMBvCYc)oqmKR%M#}#RT;xYHS4|>=ZvlD;9 z>h!b({@8Y0ghR1_I)fcwYM-MH$YQlR|KweUVD2tU>uW>5{jU_!>?^LjY zu1UoGgT{50Or<1J>2Wx3pG(?rvj8aT3}@pj-2xJc8J~?Bwe!W#6xTamFi6%KY3B;| zY@f_00A0UNZSoRe(<*tm)?UmKwnTc``N|WGST|RtQN0E(;SK$0^?VOgCh%h^#2W&* zS@ql==Za-2dl*TG38c2xSjchdS<8E6J5KA1B1%0cBmpl2`RzWt0kYC|$Wi6=vokV8;a|0vu)&O^gWgGYZE~zFDa5uvz-kw zuV%X5-XWq;io~p(QNA%BU#pK`A9JCq_kD_8cryuMvFiS1dc7ZStrp+TlG*-c)`@v# zZ*nQHMkvGK&lpb6Aw%3JZwdzQlP*If2@U8+xboTD%^)o_#BpSczamR&uaD@P|J^y0=VkkP*fa6>?zG?elCz21E7u5W9dqmu+YGznxt7s^b zzWB}~UOV?20NnMXll4?<=}Ik3=f=5&WPQl_frQhegjFtdF2X5oX^HFiu-?f10egD@ zaHj+T;&IdvQ>*T=zROmec6TuUfTX^!6spfl5g#nhUV1h@Xa&2HL+16x6*SwB=XUTz z*xIe0@EL`41&QxMT?;{NfGgksypHHz3aznZL~8Vvu3cFHI*f1eTy}IE2fF_Te z&H%8XMn&Ov4+)?JG@7SNIP6b|s4&#fHkh-0eC&U85SLipyf~=RriQD# zZ1}2Sqdp1az!Ahv^*pc(?oJqVKuJ!Q&+@jR!%i9={9)$co&z;GBSQS6BU6gt4L8*oLM411A!f zl58}}q(loDb7Rivbz8C2(Tw@n!9;CG1OY;1e0ka`R8b>|^`vJgk z%a#fd5Sy6hJ*1801k+U4s6_wi#r=oO(gp`o?N-wm0!!=ijt5&VjkyeR2Dl%!p5D@s zR6GW<+8u7NML@55HhAZnr8)frg8yHe2{VvfO#K6fB*YjyF`ZGV^JM#wLl30W!tZIE zZd5HFiS8WEDbFFi14G87r_Vm$mecCUWB%gEeMIjV^rUHAR;{~ZouM-<6AnUt(Rm>n?xy4Va>OR2xVvm%KzRR{Y> zuen-or>Tj}|EslSYqZ-};6%c{b2Sd|e?g(F=Un%Rq>#-teH3xZgpLK(FL@vZN|Ef! 
z(QC&+dDS9Wt&I6fy`gwADA&`KP}IZG(E*mLo#9a;@aJ$q#8nZbG!JaMGx9XEGZ?TD z0jsX!jxk$kX=R`=?+|6T<~vh!9pDdTWcF63dsJ2x*O#J>n)VQIso@C<>2ADwV7G0Gs|Z- z>G<<&-h!iC=6g_RiAZRKxzRuZ?re?)OX4c_&N((w)I0K6m7mkwd=ZU|f+9>QkI(6g z%_I?%soHqBEj)F(qU{Nk5;BW*Epy^F4}7IHfXRvyP+{e$UZ{`u^CtoO*z)-mgob0A zo#|o)1FwTZ9&;_8={<88c*^XFUvZ82Uf3WuzPrPuhjYH6Uf27(!*n03fxspyo&^df zitudy4vv-Tw+}q7m+1aMegWYj0S$l@FAZ?b5C#JA3hFI>mHezU=!`iphUg=4Kjlk| z@OA%|pxfaS{soaBGc%pst~M~Bu>&$t(Cb%LekQZB zwND27oi7Jp#jvYgEU$NI(!=bu;{a$9ASW|r)7j!9uo#7_+i!8@GP#6zby}P-f(ACq zt2-36V|ZMREph1sG z_uWM?;`8%!iB|m|)dEQr3MC7x-hrD2-(?OJo>8z=*0?|O2E}O%-I;)Fiy4>MxVf}r zlDzFwnm-gmR!*0iSho#VY??Vy#vr*ClkYH!^aJd_0pGaK}gsdvbab$GU zeQ;g@@NFvqF#!5jRuLwi0e&T#rKm5p4j>uFxJtkpSE^BcN}~<@pt_#Opr^fg`%Ca` zGM&9#R%^eZQ#PYW))i=U7LZBAIAqRVulq~LUeFx1cftp*AVdTrTRyIMOzLCcSsHL^Hq9#UM7tc z8xfEi?!-T5c8@An@Lb>BY2Z%fiA7Eke4dx@xC0__7^mCAEKd8v(Ohm0)jG}$%&x47 zPnH^k)hje7h;G}=MkW^h=3V=oy@3^4yUvsuu+f@HL_&1I_kQ`D9H0>OOF~i-8n~TQ z6IUpB#h8MkW{GL~uqbkWj z(9o^>p^bjkpDs8zbu51%Py=I;w#+~f6m!;mJvY1uu=|g>_@0x!aGuV?s2gW0by7xB zs)ZmHbqfoDD5elBA6eNb7WVJQ8yoj0nz}&Hr=KHxZWgcmIovZ_`;EHjoTh6YpfTGw zE)@-ng$bt9xAgFwt$IO4EBfs=uD2fstNTXC?={^UhGccJG)p5Gm-Ss#&}2n=8c@Je zN62|mA??V+O(mQ31g3n{p8^7R$nL=qa9GBQ!Xa)#z1sX&>pyhQ)*>>`79@q%?1EOmX<)kB@RX%Ozic>mCqTwVY%r{2*9I@e4lPF zJ?S44ediiVXBYh>2!0DL$XKub^v8~MVSBzpDRPGxacLq*g=V*!N{p$0z$rla^*9u7 zP|_jPQL;Vwyn7Ac)YCxVJjNo_FaeWcO`1R6l1}-Y%%MIwvw;6AU;C?o*FMQjP@65l_KSKuN%wPXHSku zs17s=9|=GkJd9p!w|rXx!y^Yw8kIiBO%YNqUiS^0)jBi(I?U~$2sq`yw6jd^`q_s( zF}NfXAbBxMD}SaxF-M^5*b7h$Ur#FtmI;Gx;&D{u12Kn6o3K$N9v8h(1^#ax6ww&x zp|?=1ve({#C^TvA6p$~Luywu(s8G|nh zr`YU(g3D(4;cl716{i5=7w_W-aIANzi*SFLwvuAvyILTU_RHhVZ2P8cCm7lc%H6j) zR66ZzDA%(!6)W^RW*Da?0^+AhX}8mr-vyf?ZqVQ#a0OPLzJe7WW%IYvl197bG6EXi z!SvoctIHiJXytE=1_IV=&tTJ6(Db9_)pPk5+nxWJE*Rn}#qyisGumovKx0JHn^)RO zj?-4NT6TXD+qllm)1JR9@7aR{m@l3^SwSi4hK9)X&(QVhhCc$al3#nHh6%c5DwgVh zX}?4wX#snJNG(@P##d3RPk@n!31*c-?w>C_KY^rh8feBAQNC$i0DtWKWZZj>8vGVR z=;FF`|9ZNM)kr}z1P6{3%n6AK-Jsg)cB%@-*T7Yqo*5>+ygSr_QP5S_F$Bn$A)&I? 
z2`O-}&#&q+$EmQ}qUutiui?(eXd<3X<${nlNkJz!Ht4NtDk8Rl_C2H z;-Nyj@mr(B2|A)B8l?1$&OA~l*RKpp#$RK;GlsCy^Ev(u%z%wlFjgm-)PBd8q7g|% z;Y1?}psvr)FVkv2r4qC_uS;fb5WY{pFTc^>78tM06UsOT@V>OKu%Jd>{W+Mj;6izD z7&H|@nFsDG8k3U7OdKOv22puu9eCA2fIMReeW>X-dL+-5-_%P07gM#!b}_`SfpiUBnq`OqNg+(soww^`FcD*C|eEyroRd+Xutm#O3O5Plw=4fwLO)wN|XyOB@iTvVxQH?^#uDn?(x8n8?)4`*@^zQ-bMdI#%e zZ?d|?oDt;%vm=_}(wa@;TbwV@)1n~F{~$v|boWO;=F%CGkxB& z-0QtbU>f60AlRQ@sF&R@Fwyy#5-%}4Yt4?UQHuKwkMpZ;hNwnoej=})fX$n?3H7{Jh1*PEb~D`mbT#2=&%FL1|CCXKa5y&S@obfE7UW zflp+ep9TErSC%oNWz^`Y-}~C?f zt_{6S4A-`yk$V*AHE#`Vg^)esB|S_f|mMKxxMht?wSX4v>sGi`zJ_ z*VAhwL@+Ux#OcJx;;ckINp$@l23*r;%?48iFe>R%E5UPpt8N>weJ`efddo+e4#Mc% z)#--2Zfr1VZ-k{XlM)HsR%LPLcL(|J2mn){q7>%x(1ukX-7qt=o4e#X6?NSojj6hb z2YQR=kk*A`bzN|C>4`>z8uVnWI1dnJLhVZVcjU|jkYk4R7R1(KUfWI-6>Kr>b~rk984SzKU>r`vIX zs*vswGR$Nw&PbpJJr%-mI_wv04VaR!&J^B|sO>|s>chY_w(Z6STo8C8m%2HLpb3Ey z3p@;rS~}@Y@GLp7wsM;1OH!B*$AB*kG*vT`-R?N~xL*NN&c-9xeliEy5GDav{Z1Gz zp8d!K0dLHX3C9|wgZS)G%c*Fcd9(W}Y;Th4{9`GAc46#%<~{c@6iYTESogi-zxgck zXA(SxN{J=rp-!Y<`p}PcZaB?kw8QD9#i)3pDgAeXuL~9Vy^tpf~Sfp+=PX zJWPs}^J$h+C4l-OQIx<(c4J}ThaIo}w-%AbFVPotvBKgt$GEq4^h^-K-XXJ!FyDnQ zjczW(h<+%Jk{g{K021{}lps+SZ~^f7Y6F#wPe#403lp2?VHgy|>{TOCIMbzcPk(^? 
z8s!I?E`#syAg47<90dJ-J*S5m((%iCM-(p?;7246Z>kSYcVfrY96|*nL5h^z0G<_Q z=3his4yOH;UhbA`s;_K0D=%>Zv|vZaj2HqrY<9~!F|7Jn92>A+_<EI`0;u- zOcbJFlU@N}@IrvPa^`a59T@xxzhu4{%U~EP*75wX-;ZTOedfp2V}!Y0tkB6NH5iRo zek;@+j9A&-*+%-I2uUIc1pa9xAlLG}M>l2N<%z~hqX4JgWjMeMbYqj=HhaTnr3ut$)c7dYAFrZzyt^jhL1f;Y-{EcMgP{b*M33c^&J9cLa-`l_-&oJ$~GB z3%OkHb~3eO&s9p}b&t^|F|CXOwysj$i(q2kcmGCOX$TMJ^x@vSUllh^VL?e8^wUezw_e)^Mf zmbebtU=BF^>?CPgs3lL_@{WLpf51UPkjwu{t=`!Epclz|65xCY!1=TMoNmXQ5zhck zDm*7mIK%*YjLqZAQq$X4pm7y)sl+|e;b94dzodupe|J)ALD}$~e%~v%!Ruh?-#AGk zIsk7ivLQ=|N(rDlmWO^sq@`=zc!Km!oQcy1lUmV+0BG5WgEf`q~FZg2=*vqGWeUYF< z#`LpDY)Z&74Ve{%_&+NMan!hnnmr7#?Te<`9w$XV3O`+wKjLVUS0u9{o)0{>df~6X zm#}s6lP-qJAjbBIo4uY)7SW4|)PhuKus%UT*_Gk-FNn@@tak~#C+Bx}7AnTMt9xK} zU8Zp0Tph;c)wcW^RmovLMH(2>exoeRwvl%LN~k#ly<7NCYYA|YL9Cy;>jpg122)Ci z6*NXVoIX~Om6)}+?c$e%WgUegjL*wj6W;+n>^ctrkmC>74{ zbReI#mGOQ%=SDF>m>4ie2n48NoZS|H| zj0I!WLW~?4K+hT%htVSXFUlt^a*ddJAN&r+Dp}J1BzEs?Lujqq(5S!M%%A`bVnEzM z`%{>PL1%JkZK`74zA%6tSa(foDdn@_FWf($%6VX_)`B} z#aF!#SsL;elUx9lkP-Bz@5F#_hH${4KjPvY{vLS_3&2nwVw{p6|1|=bz@fGzWSf7F+`xEOB1U|Ulm6d@ z1_jUqhc1YS_xz(o_-CZLXy1w87^}qpUmQ48gh15}ziXhNE#EDajVBGAg+rzKd*c8W zGx3MKt)nCRgo*-b0_<&Vm-{%bHbB8`W%)mP; zi$m|kfxC`z@PA(xIEXt+GtP-<@!9?Kfu#SEFKgVy>VRgSLoMFKY|dq`QKUA0>X2?plf)4rzm8BKwl}@BOGwSbpK!Z50#|y#l7No#cOa3$ zQQ-oQ1DUUN|FkC3@00lY)4{F!-&@|=CY3`{0eO`eB20ZQ23a|6m ze-+kin@W?=s5{0B{e4eKa`4v{Y$0wiQfe8Y#97q-?k*a-f6vVD(5}TH)rP$J>etfqx&)xKwETp9eBxBBz@4%0m%qn&gWA`uAvo+fBaIW1Iv3F9$9_7c_KW ze+7VOHw8)0zwfw#3QSzxAQ{sC5z&FCgA5Q9*P^6c{%bP7YrdPoQ$At-i`^~`Xt#~Y z7Y{4>>+JD>`5hkJoJjfi zL1lRV==|7B{y%{_Ad2Py&KDKWky_#Jp@0Z;C_=_T%Jg3&Z40jKnJ47qQQ}~Uo+Lvg zRyehmKkDvl`NZzO_Wp2|n;wZhhWBx`9IRo7|qOkpW#Ei4avz0987p8eB2- z`p(i}(I%03<8?ZsQlpVKDVkA>LiHnBYg>qQSnC7D>@DT2SL?FEH+kTUzCnTtbdy*? 
z_7|5yh~$+DpF-nyP}0AQlJ73@6Y=20N6Z<(<~F<{l~@mWHyk(#`9wI-R2qzgUHc@r zsjHX}x&BlMGseVMR*JGswge8qL+9gA!@gtIpTH zdN}z+`tyr<^~paHi8i#rc_;CyDLAWbRfe_nB43Q{SWqr7=FltE;VEk;Uk}LIa;=xqe z5|_S!D@M|RRAN7TG()Tr0@-Poi-Us$nmxjuX2=7>-bE#*Svhf!$&6d z94#Q%v<>taZC=bP{apbz@%y|cu0{UJsilO`h+PqtXZ!=5pn zk}wRhG#XN|QIZc^;jQNoZnkBinU*61`-zEbM{*GUt96Vdd#ZiQmclR7pE5}-EBU2w zp_9%&QEmBLXS1?FZU+9DhzF6EjVuRwRGj56j4KyPf2x8xeO>2@4DL^607cnpu4Q4M zwhM)1zXZ76+OiD|+~R^|DcY3X=*ZT+}Iu zFYS&{@PN^AHo1;_r5SZvS{IyV`!?wD<|}|{@9iq&ne>oNKjzvoe#d`$=8Cy9ljeqV zhGsBFx_D?X^O17hg3+`^bP@Dy+H(MpqOS7!dBGE7!6r!epAVES2GH2`KW_Q`?O$;; zn94ha!C9`lLQ|^PyWy}IIiP*CP(^wJ)94@%G@L$7qEdj~5w$ZjYaW2mLKG_Zfq}Kg zSM(v~DzPOrvLM5|VV9)uB54FOsc!f8=FP09JoW4|ArT|PBaXwZDer8fH6JjL_we+DsIHrRH@YwY*M(HPT^EnC=s1 z?bPV-LAQV7|5j%4Qc$>BA8=$T!7~=ag};&rz)PQ0#LL3_qH}|b)87*c`S<>U@Ks6~hJSP&du1!L zR}+c!KhU&+ai$#Vf5q!KrEyj1mC!2 zK|-ZgLwbX$mZCZs4FfuMs8kj!}{ZUn+>gIdZkk!hF)3`3fSiG!3L z*7LWsI0{WqeKQ!CWC^GUanu@zUCEoy{5-S9#9kxR8!)R-&GMb;8^-;SkW7A7)Vc-g zHQk=dw-gV&?NxKsSrZ#CHO3jR6RJ_L`JQQEiZQ3F4>K8ZIz$eH21Jr3Weh`G=#8+W z1}L#VLhVZ%98Onlz9Wc`T85t&@3OmfL1?8-1mi&?f%n0XAN;BhjQBu&-0o9bKul}E zJ4pWFgl60|gaT~v$Ib(|E!-(>Tp^CG`znQHuYggWS~8TKoVp^X1Xp)vYHavOkcx0a z$T9*5%-H~#vky3Uq0#BGbVlJY>>;#4O5vXIFdUmGF^(Wh%wtD@0*sG%Zc>Ij%aAoj z`jzk3r~CRj7(-T+<=+g!`0m~ncO{57<$hk0KBD1Uk5yCR$wBZ$Q z@!;=Or085J84-04u(cGeE6SS6c{fJM5lMs=?#%eN!Y6G%wUF>&E+Qe)D6ERJeh ziz{&O@}!6Y7>y7JqXj%j+>m|?LK_j6#Um}=JYQxv-K%`;?7X78NLTa$;-It(^$9Rj z_@E?-U`-uR(vUb!0!ZqUVRZ%#IMrQZ+VgGZ5oz_iU?D3IPoXp!B7}74!LIr+DNU3P z$YZ9!Ui5D3sRqHgW0(yMyVjp2+3hRfpyM!+QJ({eg&MSu;`+atS~@mRP;yX| z1|k-S%Jk)P2u|?d!ZxUi3*%_COf1pmSL>PjqcB33RSgG92mp<`PE;KVum*_Y-@Y+c|U6E)b@KB0<&CXVNK^u_Bx-bHf|#(k`4E z8vHcr9-7TY6X%PdZK~!M|L+IW85BKHFoH>&yH)+vkVVaJP40vnYUl=df+1@#A*Ail zuwONmgy^92h^LOBX}+S@l1eLmLPkG+t)$WvXw5SeO}{23D1@fb^EzXHnBW~=vE}f# z`6h$9cE!i(?(Syp_NU3&eInc0eEZ|RJp;j!W@$*Qjv^&_`%~xwx#mL`!I=tA-nL)S z0F9jDN2Dm~_Xza3HSj4lEny$Xpi2iqZ688uE_d0_6)i5OIm6{WE-a$=-rw!=rVCr~q3yHJdM zI|SJ;A3^3HqP=)~?TU;LX~EG6!~Cogb8BdwF}j*XTXV0wcA@%r?y`of2%@MpYmj42 
z>CLNXluwo%zt$qoC+Spd!M`3XGDam;&tb4sv4B@?-(f(g`PQ_C3HQd7qclBxtEd~H+$lx2gAu!B zAtoVT?SyNyWu_UJT{Um&KB4e)qzY5KU@l0S6eRZE_nM~pLvG~h03kVAH~&Z)L^sm9 z!JZfKWB4s)fjMj#DyPq-d9(q!&%XHyQ|VwWa)xY)B2C22SbSho;aTF)sb^dSEV`VzuC*4D?V<(_C%uA+UvQOAeq3rr(w6u;t9|ow^MQf}8e0%_t%y2g9n3GDGhgKpAB&Cs-?hg-T})zM^KG=qGC2M9I0LtxD{5x{7L z(8gQUn~bu0z;`jhYe*>|gn#;%c!MP)G?|(jOv*!VQ92M(VR0Uo)kuPmR#i{Vs^68c zBg%Z)%oBd=7T~8aq8dR+B0=|xA}a3T7F*U=n69OPKjYOZ#@{p+cHm6r5Z%1`#*7<) zpkZN;&=*Pz88PCD8>ge1l-(^8nL+x`kkJ>9CNZCWKNgYd=0tC^J4`OX8#8Nmt zc^euJP7{MVGBfA5Z^9;`&?$)v(NUmL325@O$*o@nQ9fTIH+$+EXvwZxLlq=-_;D5g zv_qbWxMEN%B&JuU9o9xGmNv0zWa5T6C8VO=hXUn5xeI>n`56>`!LjN-(v*Joiu*J! zU6EHNS*fx1d3dOU_!UeVw!+gvMY#K3cliUHMstwg_9|b~;Ru04x2A9nJ)N8WT#v(1 zSUD`F*fSn%lNi#8pRd@~&A=g1GiR*`!f6(_lyI~DUhq@`Af8kzJ5{z>q$WtvWO=#y zN!sC&+HgI3^e9pbq=+V_Ev8JXV9ZMfIUEzeGT5o1ZjLoqE$hSE$sngR@^rPBVjx zGW!oRi3B)HNWIZkQ2ED|g>>XH#B_*cd^!ce0!Pe1a5Nq|h|>*K_q3=(xWmvJvPoC> zrjL^XkI}E6^2Q*`I1^1YcU6S+1UI#*HyA{`v&u(S_97b$UaJ^iXZt}2@F>X?A6MTr}~mbo0IG)CR(N20Ile=($dr$IorA1|z8WWEe zgGOCC>68)VVN|1JCf#M5;m1xetQoMz7{d#9@PEFST0KpH~aXl0qEBE&3H_-uopf=)2qy^73v@ztQ6WrG+eKB?_4eEoDE&KT$wJ6 zR*(EbqZp0drW|Quw|S6PD{(NIp7UBVmt6CC*jan}S&-Un9Ep`u-TBt7(qjGlk^<*; zinil1$d1kaw=yKW7lY;xfck}Qo>mJsvHtamlmGBdAVGU?_QLkZeCqj_M>^`dr+>kZ z7e1{f^Ju^XYqFt-c6^bqTEMK&e#*%;7RN$Ie*Ttyo+AlM-)k{*ZB>9#ptwBiwk>`| zBFkiLE-vE)wMw) zPpX~UFA1DG(qv;?r+Yc3TlWULJHrV@U#^bklK$|NzFkhoqRe6Y)nYe(f%o5K56ttH zIZCecTjinF`x)hO=+x=+yolxGe3W6j{pH5#ybHw2W$|Ojk3Sk_yPJcADMW)lj1n>U zFmHVpLg5u7bW`h+=7eaeroXE6MYY-O%?G%?Wq&Y4ugS*shu|3%@;{q<)u^}O?UnzVl!!>s@U(rG(Bco*$Q9SS z_Nm)HI;K-07od0D(VUInV(#2n7_v+TW2~}<_dcb`;c=!uH#K#R@{QYTcCK+c{;*vY zFPkqRDc5-U=sHnNi^RVqgUe}^zEy8=_j&QBStQq0j~PZqr$cmZ);{B*ev4BmAG<>w zop!Sjp!ho64nkOYHbl^Dv;2-Pm)^DPzE5?-(5Z{bsB3x2bCpQ1BY%3fCTcF9W3IXC zvwWhgr1x>wCNqEDHfPgqcXs}MVKe-+jmI@{U4601-9EyZ*PWrGA=|}!1H{UGw9+fM zilJP+3Z3Z&Rd;*N&OTY*eckMy_}6z)D6=0iVK%r161gKau2XSKfejYgYo>ybn;t;EBJD2bIoaxVd^dST~#!fj{oJ@ zFpb^$OQ!NJLIn^`L1S}6-MrTuD@nGCOhm+pH&@O2ujhrh>g-*LD8>>Q_i;yQ{yxwvy#$?h 
ziboHWbUb2e?FKwB+v;EP{}RkIFJIVy1VJKZSPB2!=ekIn|kLH$IxLRin6Nur%(HTdu6xS(cjc)N8S}NtT~4vD-Y1ofzHxKH;xx zUp%p~_es89n%1yhaF_1TUlj7adXat*#sHqUDJbP=uMgcEOw;V&Tp6Rzn|ot+kN7=4 zJ+X_8c+h)_cU##od!_cw3%_Zp`sb$`S8JORRZ~>%U-t3TL*)%CvETB6^gKm;;Lc(j0e!kl<0hAV@23^_^O6Z^OfnC5nOa!@BD z=FPx?eY z!uT@1_7S-fa4Xb2j8EBobu~w2yR5qUeq1=;Iy&=occaZV(cCz&iNI;K)91$X!fIt_ zsyNl+v{H=V2apME{#28v|k+W}><+z(<^>E&3L z_2r9le2;o%HzyOJF7u?a7kviu?L>loji0>mi3pX1mVa9#P{GJ-1mWp3f=gUSv(nuk zZ%*$Zyaj?|0?2z1$ z{1UPO4Z<^I<(>r%y=XtU`slM8ZV5Oje~q}`RAo;E1n*>xj31Ts-|>*baUZg@KNy^r zN;p?PKBDu(YZ6ol_FQ7kPKRq(0uKtjL<#U&#>9GG+6eC-M2#uC+m_CgC8n11hF7xq zry~0X9lVS3d(?{`H9eZ9QoQ4 zZsA{zd-A!*?qvFQY3qj_8Qoy8!6aiROu?V+43@jsYEnI9V!O^=-fT?P&R?*Z&V_#z z)duP|&OQEoVrR)OqO_z*fRc`BeLEN0Co1gnnKXF#X0B830P8Jal>lmfI1b4g9VJja z6pkX^s8NZy+oJ)MUBz!pBPf=64E3)bdGy-~%`__6Kb^R;^HN|yu`81p_1+u$W?$qb z>ac#-n;xJg=ZT`B{=BZS8cYBkTdY<>SdH+6ECp@y;b7`RtkhDWI z$K&2n?6}o1@V%irb1^>S>43dxXcvl|QJ#IZP_LK>W)Z$q%gSz%9bKr)P|uso4+uda{#e#idr%W7r)@Oa^0>p*B*Jm?a+2xn=HTd+({{$T~vE}f?E`vjN>~JFdcn*?DOt*$HC(H;8h^TiLs`y_$)#Gn2x%<@dPRDNoC)(ssw&V zF~VywdPK%BA;)_eoCTEOArrLXUJ5bjY==@wce6Z#gFo=6qo zS&s`D=NaN6MSq|8oGAXCrh8kHvK5~l{o;z62X8cj+t%~FCE4u~^4DE~dx{%0g98{8 z0_FH!r|#3xcB5qhP{}w}K|5IJNv{3Tu58}z3EGc%B@a1yI>|+o$Iv8Hd@F3A`g7X{4Omo0#C!aJ8x`=x-a5^|LT>LHLl+wOgC24kRqs`mi6oWbvKLS*1*REK*}A)5Ne;CmGz>=Pdi*`; z9n?&lPAZqQHZ(@e?-hnJ^iv{1wMnB`Wx9qQBJhc=K}E8E={TchM3o=nc-^y4pF5OT znx|C-C!2(m(po5=Va!M;)qVYSv72sAk`K-VdJQ#UGoI?-7I65qYJSpcFjjqhWOPB% z(PAhiPDrm_VWUSl7jl7)5+Up_7FLDW)p&ot5xqIEMzQ~cS98C}8Y7@R{Y@&nXT73w z)ehQuOD*Woz32)(O<1rU_JLZQ*gdD#+?4Rj}2RqNz zip-8|ce(5MPL4IR8T0GXGT^?lJ6-X*$agvN0)y5Mz&;>26)^|#Zjk`J6+L?g0Li^$HA*G@Uu$`zZRgI^a51EC_{gYeboHm)dJqL0U?xoqEj0hN9iPwF1F9pW3H( z#qzh)POnfIj)E-4oT1VI(HZC>AJ43Y!fl|mRdeAgw}Vqr-J`mil#pdDamDbvD&0pC zT9|MCVqW3NJq&n-onl{k2((HM5$$B|xhliDhL~Nkc;LAuknUuJgUp64T)Fb1@}=I14VsJD=W?HC5+D zcwn}-sP16*j?wFCpd5IjqrW8x?ffxS4uD|A;&<3DzOBs!!Hl{AqFVtRtfAtaOfjp2 zUd=+O6u9~368 z=X(YU5$DU1%|h8juvR8fVc@|HvDkifny{hiydM3KmqaoTnXaG7Nd&%J 
z9a$K=tUij8wFvtYHdHoyFS;LoHcoh-`RO81#W35*AA3gG*Y#`q@$SkP@#1vi7k@Bq zpZXTPy4T%aRV>C;jM|aYIo&j?rdZL&nEJ>GbH!QwLl)aCewF4WA6NsGny9qWj=$Wv zGHT&2v1UT)bR#ny`Z}mvrtS9PsNl&Sk(4&K^3NOfSz5RX652mwD>3D?OdBgRYZng6~8g%{CKm_=-= zXXVD&a@KNHPTJ8Qh=w2j(m16O1>r@dLeu_t>xk4P+Rw8bN!wPS^!GmLgQtNyBqw`b zS;%UA&}KY6vJ6fs;yrY@1D~f!9`-(mRu!|1Zx`m?n+`)M#G4n!xMe;GXEc=&*eVCw zv&U!yXeMpoc&6|zsFDI-krSJ2IW2mbPjNYO_I`8T>>f&u+0Tc|ebNec}Z7>HUBSJq7QLP~4CfeiA-F zn6t4{1>F98y)$a4Fei&nJ5Rg6UF4XrwajhJok^F1cfCAZ=q-Cp<@1aRtrC5I!2WRe zQH=^h>gY%{iV`S2X%YHhv}e^AAYJ8|Ja!t5KspT;a-HEUIj)Vv|@_x|cQSl_&@`U3k^^qt<6hQAhhqam5S zVek;xdl8Q&nVPKT10DU0MS=Gj-#YvDfmrs$^V^w#fwvRM)xsNB2q^JFCJ=`(oMOZ^ zGv<^J&8wtiHOFSN|B0q%hlK+b20@s;(<0Jk7LV}>+;MN?6K>!7P%)z2_~CYUSYpNZ z%GRB;-J3-Sd~gDPQk(kYDr?VqC#{hduHEzhCqkko%ox5lsJ)}mjeq01L5DEBSk}^~ zO%=etGL5{wz(9EG{S?gJG0R0>8@(6LQhRZZIfZO3L&h!rC4*t=i-WU!-*6|UAM)kU z?SIk}+Le_?)O;Hr z(EG<H(ZOeFZ>FN8qT2v+ zMxgZrQyuf*DJw(8qlXYiw_fxW>}nf=R@cUf#HZut*2^W)Dg)3@`QdMT-L`@3!s~wR zDhE@G_N_TpLhmS^fAV6#g-I?^CFy$#2nhySfoa)}n0R%-s4UC=egl4yLXzmik)a zQ>?krL!g9KO9tL3`8Kp0%n4rtwz{rS)s;o{yG}Pg^$zpxf{6{hezrviu|Gc4b*GXX z-(;9o5s%p~=yOqA)nrt!!=uVVDid%{c=0(;y}^uf&gddU=pzM4A`fKTwtI0kxxG2bF3C)`~TL?IkKTZjuF{}tIi;#b;4$p{tw*& zFdhzk$k)!WX5lc4lgLRs|FRx0FbA|=s%fw#u~^gmmj`))DMj&up1~4sVEtdJMC2vD ztlyAK->KQrbBiUVh#6HRjCMMll;3}TWG4UcDO&Ai8P6?|a7AT#7-Zq`f9W1mM&Q8s z!^w5eEnh{lmF%9Lj=v7cAi<;tJ$E%h?xETI1 z*8h0iSQ@BSt|9Msvdm=Yd(%{=RS#GNUl^{m22I&}1U)7%b>E)s`QqL;=rGXwdWRF{ zS7iDESoke?P5AuI(8?Cy=lSX>vHqOEVGiOZCFXP+#!-)cA3Ue)Kz>X+YM~IGdcxLQ zM*r+4?hIP%)!YQ2yEtI+9r%ZXIoWf(M|GBhu2XH+*taJRsh19IhGK2)#!|4IZYbt^ z1)*&7Jf8b*YwulrAl2@$D<_)s6*2HVv$cfe74sS}a%p_@IW0~ve`0jJ97!!fpZ@)% zCGar}mua72{Pi>A)d^SKW|r^M9emnj5E-aBg_2?K)fcDHM%lZ+O5wrc_R`zB+?PN-2QhC|?z?T~B>6yBN zqeiDSr_ycor;Z|J8Gl7k7WQ+N&AQJ;`*Mer?%PNvZ+8PJ03S~lD6lfWqXgunwXz|4 zD&R~tuuqg)k(%;QA_EK6`np`|KZFM0k$H`cLm72byR|>XdWSV|C0bN!&ogKoND4aDBf;XA1_z?cB*9D+G zTqm*FTL5#PBn(m?M$fCm*j`k;AwJg&k()|PLS4Oj`$cniRd2j#hIdr%$q5_%Q8!WL zh7Af_whOz9I|%>Mn=v9rFQM~-om}q65H|G<^*@8jEFFC@_gS@o%EZSu;h!=7vKFU3 
z$U-tZGbK!=k2;o@x^)AFTHCla6r%~OnU!Pcpa=Jq{lSzFOLN|Ol`(=|D>9`?tD(04uk>)yU z?+d5E=d8o=l2<0V9KBN(TZbx@3XhQ#$xaY-ng^Di+#ZSVC)u}L4#zAE>;4C==uan9 zGBtMUxn9>x??2=!W9fIXJg>@?v{ebSPbXyvt}Gd#Q^1R3&scoE^+CAh(hhMFgSZ8( z*exc8LH&em7UQ$RqU<+XN&$mD{Z603tu1ax=Q5YQ%^oDCPto zT@qq=k$|L4H0xjf4+H+6-ydLo(Pb zaEcOtZ*oCj2tCzfnjF>$mJABL=4?j^+!r(ZAcObLC=}%(vpi z8rl|S-oX=O#6cm7N4bfX{tc z^z3TIJfD;Rn7R*vY~!MT2NRtY=!B7LBKp4x0HOr*^^gGKbFlaP((m93TW_t}*^ zqGGlJnLpEerBq$9G?EI2)%M+7y~#%0hc9%Oj~6#ApWc*z^lkdZOjL7(3d#I>jpbxK z>&08;U7rqV`^D}YB{OXmtdVBnNFDdz949jKE~Zq=cF-_4t!I-Z&6U7AxRzsW^Am6D zDW_skkX<&rDSG9<pB(=QM1LD;GWTMJYO{`N z*+QrO>9a$WL>xs%x&H+_mp>6JNI+5AtQQLv>p3#nQ2)tjTQZfz|2`)UeOP}C&cw~O zB1qrD{kP0WwB1z*T<+$oWQ)8o zlqg@UjZM?3J3Y!mCQR0I3OzrZpLD@tJZ`_U%N?0-zuKFWEYT1d2si2MsK_ zd^tJ}X2C4#wYI6!t#6wbcTA6klBO^DIa%J-EDJ~myU{V62pcFDwCdKQOXAY83`@6GgT%V5cZyEf5>3-x(>G# zQ>r$V-dnmuP8IV0F2HU2jYz=bmzGkq>j`tT@MGgE0uEaF3yb8RfF4uQ-Bf_n{uJFL zm0T;%P;#;XxQ$x3-UgcH)_m)0yU|x|)b&yuSdaIt#7fSZ@e77xA~E-8nP~-e5))d2 zE8@4TqopE*)*I|q^bC%b4JtsUbwmP~S&?7TAUNj-VThW;gQ|}<#Dk#Rs>B*CXdG;= zBY7sU*C6A30TlAy4wY7W-JDM0|CHK;G}RnJs>DBfU+4UT((}1;q{aH zKGFf239+awtxA}2kN!GoGv)a@(trH@ zY92C0pmqy>CL6He7--tbv>t14-citnco{l$!8=XLa}VZQO%G)7H`pxH4!RRv7?xh- z3NR870yP*w)yqv(3wKOMUH!!u+i}oNhQ+Oa`*l2<+0f@@b0_`Dg9rx3cR~gwR>FmH zsm5Ut&v8M({n{;*L0Rm$@xhG_JS0=r97L~HMk}e7>R4E=RrNCgb?TS_+^PatWmw-j z50^IpxWvI$r#~#FOzOl?W28%vf-aA zHaTqp6zvdKsroLLpnZ4nYbrpNy;?a{2YHGMd=PP}ua*O-!)Jh*ib~CflL~|zlak)u zA|dw?fvl#AKJ`s;miXZ?>&;Es_y6eN`l0tlrhTX(NL_yzp?C8-6z=AFR=qf2wn*Mx{srYAcg>T&|yM0$&@#8av>q%um22ldGDZLn;?8K!GED+lpDN2{(Ocz`SVpbU_MvV{ zV!B42PP-18a>(l-qqI{#+aq&V(3>BS=~y;R(Vro&i zzCO1#{K$30!|aKc8OLA2RFS3Cbot{aozP@5NrH#1?pO-Z#QNzEr|}D_%@S0UG&Ws# zn%S*Vj=7IN>3Hiuxs>R+z~+nngv0#y=0)L;mlEXVD&;zyse+zvC03UwD=b$KPjH!b zO`m(2`RI4^iR+ye-~2KKH}PpqK)ZE4jUpVIjvO}AXQ7egamo#lrj`bb6k^ExHiXn8 zbU5toGA|X=c|PgME6J4Uxy_qRh%@n}OldqlG(f!0Mgtzj-b?5B&?5QnZlY4H-{g#Y z_RW$BFPuSEchoR=WPvIPmVWhO&~0&H1ZL2N9bFCzz&If8!WqhFlF*LE)j#|J1j4MwIk4Z%9M_Bs>DmD=Ru%2HR@(J8sYHPIULISyT8=p3 
zlZFCkiDt6s_?ro7X$z{v#Dzz|frj7;TPs7Z8X+}M+85o^*(~?tgr>f>68R^8f6Rr) zF@Ep))&<;MK2mimJQQqfQU)cXg*sbnY~4KV;JG(Jya}@g(Sqg)2DMdNk=!y@c1uc- zZ<>ThRg*Uli$7qU26^H0e%}n*kWRj@_Js^0c7938wO4-s*|u~U%*EAbeH9&UdTM7)qPT~W9&DDl;i=HKX-&gugSkYLLkvNV4>bNP0tJ3yS-6i z+&7YX3g|yJ#tUAt#wJY^p48n^5`|&UbS?swFZ+?y2Gk1awDzsg;3>f;uT=0cf}9kf z{2jy1aSVT&rb9vEv->XB!Rt>KqKYgIac$ETmf`@=Q7+S!?*`)g^iXZICT3DstXdeQ z%;lq5 zH0GPgezfpSPqM}D9};X{bMI4mEaUJ^1~%*n`;4*=1ibrRbu|H&4B_hmZxfcRMgzol zl9W>14;Ivyy$+bVwERXg_%#hYFVykxy3vG6(^?{r&(fnF)Cr@9P_yjg3;7a$uY?bj znIUe?g6_<^NVkW@fP!(YUc1A_3jkPdK?XwYHyOPoFkqx7NSt_w|O_ zSb7+|^JjziU!MeBe=+gz^66+98rYYY+zNGn>aK{$Y}Yh6G#I1h3=~puamS@gQC%je z>8^bNLctrLlV_o0M6HvzOKqlv3P^>BO-e8c5C@mBKlqbRueJtEY!`Dn?SB5jiIh}` zxR{i%f=iR^JiFJHRdnAN{kwXd`lmPMKmeT4QYV~cIYn1@AK=GLC~Z+ORt9kwF1gmAab4 zqtZ~Pyv1-jU^?}vZml^@Ia80;)vmlu|6-uU^*|#RrnA?aN_{av5AZW_IO=?fV43aT z&aUPUjWZfq>-)K%eQMQPGkgCdoSDszt+BnVaq;t{;?-43s%O60itko0OG9d+h`oTC zXs|1j#k%INdg$4eELB~wnyNMICPJOmeKtyjs`+E;F0<%DX#AHpj>mkrEidZfo9WYo zJfC05`YeUK$*S+Z--IM7PQl8xQ~8p{@~LBLR=oQZrK6}chEm3~pE>$9CziGur0Hekw4$6+@A#Bc`dF zUueth@EYHz2Y5HWCcAbcm8_&+fMIx%fGOAcaC^l))T@8AC~N1++C>kmi2z}7(C1mK zl%UaMVL%=WslU=3hBc-DN7hsduT~XcqrM6gepFbhy!xS_5fgMmB&I5s|h?$%p8y zSUK#9O1nl0+!AT9!>4cAuioXC2$SXBcbss7pq_|Ohl-pK0~?}<%!tOVr}053qfwU` ztgaO7Q_m@(8f^&)SH)C=O5F}hJG#OiDap!_IVN+L5y4Uo5^j{ftjK>@U@klqCSs9l zaJv8Cg#wT0I>$9HFlOJA-mqlBAw$=tgN8L1tTqzS63ki`^xDG%gG&WUH7St?nn-qP3Te zcZ5(lr7zi9TE%5|)*QqE8{Ae#Wvc7z>m#S1;S$B(vx?qS#uGx|8kXGBim^n~_&MHQ zVY%vRiIxTFXP8VLUa1o_hQUAp(WphrVlXSsBb=>>XU=@l4`H1s$0ji3R3G>o{0-Vb#v#$6TcC_m$z8W3)dr(mpmrpT^mfuRm*L6(_27$o)WKOq2V*y_@g^ zSINB8zQtE}UZswxYh)VoqVDc{Cv-!g*;j$i9~*6QafjUL=~eFC52sci!g1#v57d+XlEGL0(r@ z7{SXJWKcp+Ynk_tZ}d%s55qD6CEJu~axHkcEA)g^6DyFv+~gO6lJMOLSr*QCdV`tE zpLV(s!_-!To3Cy$opSnmv(a3qr(F!g zG1<6A>gUfW<}Q;;jX8ZRO37y6kIB4@Y*TYZ+tA@|$Sd(Ql0>hPI4jt6g*NT=UvE;? 
zmU$?DB0&BS3xkc@s3aE24V~pkc?c=KSaZ`4Lk)s2xLWdb8wMPoI2|tdeS1~%yO}Pw zk_~0X5lP--A?r!5c-@tcq;ZdL2Js{8?3KKz(^7~q>k8*Y*j~C?KAR3YMcbF1Tb@py z*qZ6oX>47dSd~Fq#t-noY78M!%17|dD(=cva%+W)r`L#Eg?CRmM_$sGrQUODecbB_861FGT}J~9dOPn}+N z4aD}fh48VJMqFDKrmG~&v%K~wuaJ#$-Wqvl?ZV3zpzVGrUvHEdrWj-BvqE!J!I<YM_QL#V_H3B`I2baZZwrpP)xa!#d zEGCq#s=D3XXirqdw&pg9ygyrE-Fw_e0BIyUxZ7xm^~(oTF<(3xIfu2rJKDXzh$BiY znLamhfewuU(1?GT2n;Tg7P(D?OrCA)j5G8Qd}4; zb`z`zuMK3P2eC&bIFNG3>?VoKjR@$cWkWel99unO`zLMa`ZslDrB~2XVe3JZtL9u= z|B9L#V*h&aPf<`mN0XIt%%b)P9*y&oDTXmIL9{XiN!Q|P@2AifJr>t^GM`*S%6JzQ zvr3J_1z$F=tI!tKDDPTi*N@^T`)T%7E;n<~0k!b2s*+|;BbUYBWrk(wDdm70P=ty1 zZNy5_1P96+9gFC?6IXtRi!-|59}ebk@)!L`OaEDw=cwNMTV4cHxIK$RFNkQdG7ql0 zpz&<+p|*Z>1`EYm%lcGxM}bh8S~*O!>~n0~9M{W=!M zKmTY*&#)&*uSNO~mI=hSp2_1&!`!(D_FJ`476y6`rl*?QN6f|#w3cKd+)4ziRkZ&B zJw@utVsN>Y>S1CA`D4+2vR+f3esxkC({MenETyZvP8^`yWcJ_FSgUSLx0d zDAKr5w|h-2y`yKaBZvH#2Og-0i?UB8gG!Qn)L7M1#ew*e98N3K=UDq|M7;vbX>k9A z;Y>AOHf=B`L#Pg9$9}3)QQPbJ*^&Fav^Tzp%9Rqp&Gwkc8Cc#TXkFesTfDsCjn6W5 zd?d|OJK?T!h;D63zXW#PH-FT7)Sc&T{Km!OV;#K-eOT( sa*F=4@2EiE4GqX^&Qhp!wBrdakzlIPi^-)E2KbYfQ20V#4we5bro3AfWo--h=P-l_n@cKtPI_ zef#!9{M$F;ANDrJW|l?}5L7V+y1HoMv{XZSdb+wp<22L=_RjLb!Qt|{UISed#64ZZ zx-HKjBizx_q;ze=euQg zZ%d!OaN(ag;SAoE8DwOa05Y~YZImrXyW2}SI94Y1IzR~VmUgV5xcPVZV5bR&BH9tD zNh=s@`rUlZ+`%t+rDpBU74@2JK;m+-^-^v23r5(&|I!N2qCXMg|xAOx;DkNU%h z)zwd*$gDs2DIPF>y=eUU)skB=S~VWd8Df_`afii#6fSKr)5qub?CmX!_0y*%N~W(b z84qu7=lySQZ(1K%-q_yt`ba>uebVi&c~`avHm$Oes<^SVG{hJ1H5>$FkQoFt_zDty z;(||bFT@5yz=FTgz^Cv}sDG!vXh(PeTaDgwai~xGXE>@P-4qPt0B!A`L0$=~R z%|Jr@R~CQ;FNv!358`h&_C~~P^k3<}lJFrA6BF~;8ya)T3yb_M4*rjq#1sIqyojkNqnL&tLifUn&1B@qZ*$|0~JC#PYwA|C926C6yeE?7!Jqfr|w2{kLcS z7XIJFzXf?1{#AHrP`f;t3*0ED=(prQ-pNd}BJrV{qx zq*F~RfPS4|W}TQoo@A-c+w%(+F;!78(wESsPgEpGz5sD&c3Q2~*9?cviIcWQhK6ge zTSn%_L-tx`hcnK5O-ho`Ln0_RZ-{>!doEBF-zA|B9^$&-|4)*)H;V21_b3pM|2T%| zpozEk5@|xN|4)MVd)WJb>h=~m`wW33bJm>5f&Nb&*MD?8YW!&>^nVZHdlYZ)IJXM& z3F!a13juw_0}00g1Ytoy{8J0>A1#>{AfvB@Y| zO%{%m{rv%&TAI{6OzAznJ>>_2WdG9aC!KE6zS 
zeG+P(2l{}kNo6r-$idasGa0p4&Py*nJw4^s z@Bc?VG$g9E`wIeE|DYV;!6P&*VmR3N$~|MEY|hs1nx)zpx4}Ohz%Dm>xSM9(6LI+^ z^k*(S_kfF)4)hl@Jjj1^5>uSFsrAmXJvA2zbdU6VU+iwVPly(J)--vo-sq?}F?n>N zJ>s!YP9!JroTxD|P7);W|B#=_@=nh07un@zc^y`-lv71?(D=MAJ*U5sV*vtcr~mCnrXcUJ<`9!>7XPu&#AT=B4ie6gl*@%i)N`h2VOW(` z^)6wdR`8ry%eHV(#1Q{;LS+7+BbeBOH_k?~qIUIOTFIc8$PQ z%rj+d!NL#@LMF&V`n^a!2v^LD$M5A;qSVUvb}X-}OPs;$78ps$CnXw*UjTc5zFN*k zXLPDO7GhsE;XokB7LW&LEh%irsag7ciIICNzSyn>m*TV{Hku{gc0Yc)#mfzzHNLR|5 z%eX$>J;bqq0^VYpsvABeF=*wmYXej6PS*`m_AZ*7H$gW8X|rVuc>NC11#O@&<(eI1 z=DcRq7WG15Ln6|tGfS7~H>#+6S2fj%%quO9%pMor&x$36;^ylG1V|n?jHw)UJX)=ubbjh0_29>$ zIYb~3jw44+9F1dUlQeTh#Xh@R`lTpb=Ro3 zLdQ}}(KqPFS?5)uBV$$HV1v&`Vj5U-;;9p3!A=BRsqpN`OTre6Y35qZ?kQyD;>Ky>;)Aehjdh^*pE~hi0jSaq%rQm6U zUrAYBbK~jbZ@+jE){5qI-GeJvs}0N~V2l+B$Cb8` z$w)_)E|kousIXQnmj7O&-Tq#wR9$)=c=Eu=<$42`!R45WO&KneLUgJo4YY3-XJL`qveh%tk=g83WsD%m5p2-Pvy`hb&4vn zW#N{*u}L=+DUt+R#@61!q!K;-?0Cep*MORY!+&pgb96B){cN*f@EAL91hS&;st)m# z2mAZt*&ua9q!Hec<4Lc@k&7#NOGc~L8)Uz58h?CVB?uDa8W)2agVB>pIlU47iFpBq zNYFr(=Nl~YiHcpA=j(SfRW4@_RRU}pZQu7u!1xZ0uR06{7KL``ak_c=vQL}WEq5mC zE3AO0J!;qOeeo}9iZA1-jX#xW5=(dM;^$W+M;DftQ%2%xMD?=u0I%xN^CkXTKE5zF zbyLw_LxbT|29-(_^P8GDJO$I|X6p&wydl1Hle#m?O#{wen#mR=Mk#x;k{S!S;$i#N zxt;6Cl(eymc)cP=2RXEpM`vflLX&8_q4FDdGVvjmY-sfgO{;hICXs7u4<}YmYK?I4 z{lGJ&fHlRG4=m(jxke!(`+&o%(Y8=3zZWDXy=JoYMw^+`2-frdNJ8OT`vV&0p-C&_ zoXtfv*M}YF&F9*UT!)u2S@G!mLRF84{c9B?E4rM_ST1I!%9}=ju33xwBbqGp)B%p6 zVHjtlHL5ii;LOYkDgXg959q)zXEzz?8EL)VVwKq8`lw%PU;+Dh4f_KX#68_}i^u+J zgZKHmw!Upe`S{}YGFvjK(o{~o0h1=NkVeIqiiam1cdoLq!^io2IgKpK$iF(>NZpmN z@R^Hap^kk5|Iu{+>#_QBfWD1p0bGY}^)@O70j^B9AUoB`0KpaDekVa^*^2{_+fME! 
z28UpD3xBBg4|O%cL{KN{R;VC zF3@>FeQ|7?a>M29q}R=@Zd0&Co>Gs9WFJ)vOx*e(cr7PcZCx7@Fdah#t|`6nh9>2k z#I3zr_}2nJ52vE3Z}a%Kt2K6XeF;2SrTV=nDo=Ab(Frh8=#`xoGH`7(M!fqVvEG{{poyr#C(}nkh_OR!uz4J zs{Zb9GX-(a1|jhU{h^@{EP73j%F`12V!Be5*w766^_JP9>fJF3gxu-fOa+fu{fRICIOTQTB)Xp6x$Zh%kDTL=d%I-7y5JzUo4gR7 z72^-!)tRyPs6?@zKqaDIZYK< zal0NRERGRQR9IWz+ziRRFj0xk7ItJC_5U@kUXeleKIp_=M=69X1j@x3j@QB2*f6+W z=Bvc-4{OYB=6Sz7ig(~yZ!&QTQ>hlASZ&nB^p(BbvKJJ!?G8lbReg;M@GXxVd48E! z(J3^t3xMcC>K@J9oN<@IaJeP2S+D7qnpQ+qDpoT^WO2k4NCCecrs2shEDsP{ zNKD*5gv8BxZ5fn^#gGJ;O?iWE`qQI|B=0)_yCP2(0)zZ-_7W{g)IjF%UBxFv(jA!_ zW!3Q6!BcK~J7?k*gqeFg2U2!1(-^~Ezw~i12v8+A9^I)DT|8x6@SC~%9r%#3to5nB zkc9JOtQEUGx~O{$L^$buWyh0bQ19;Ut~0u(FkkKPE2)v&r7*XScA<(R&V3%&s8f8t zn%r1_n~q3hpR)@=%fE41MvbGi*oKPwVRjau-$BwJDnE3DVxbmCuvqJ?!&})z)-ZSR zD@54|JfliDy3R4bY!jlPNj=}4=*4wFI9X_kzBLgI=gOH(rWdf5Djz>aaDNLzQepTI z(%RclBo{-Goh{faV%!s9S_{&;M{Wi-DX3r-AzZ`#3_s%m&DGw#vR_jtOT*InHH~C) zozh`?T<*X))oZDrCkcPV;Y#;>eWYx0?axcagan+mJsS6n$9xMC8)>jyPlZv8izut} zih&uOXfHhEbn6sWtIhcHQScmoIO$~q-_l|__G-rnup%&)sm`e z14b*n|7|{3WOVDkzLeMQ{#cWaC2KN~K0TgXZak4U6iL7pmwYYO)LSs=283{UeR336 zXV7R!4~=V+Hy+;fA|Eygr&244PgQ9JEX%J0o@V;PSTX{NJ`rLhPBq%_w>X{RTCFxH zF?>0}WiuDsq<;`eX>hdM3{kFB`<}t=OwPd8x!L6#GiEX4OXKBNw`G&c<@lR%Qf#Hh zC{Ep8NHWPLJ`w)N&aV1qlHcNJsoFrD33$P-zRGr5BvP(kX4fP>YUN@#kCH!Q&JbBEW2Nmc(K!pyOI3$g5s$k92!9|84wDtw)(v{S))C+LO&a z;YxoZwR(|O>3byHB^_!KkRDBXL|TKXxbdnsZ%fL&O6*|-KbIlIu+w?RYaL)&qNP`; z14~m=^Qf3I@N(0Y^)9w4Ou2?tT1>1cF`a(3@ADg=ctV1lXGwmJCQ@I>`kE&M?uzWSH#E{&Qr7oz zH=-YUiD?Q5tSC0OKi%YhjzXF(DJ1ID5IB6;&PtC?tcg7CbU0+=ooS_vz+?3dL?RT6 z)A8sp6N}PrACHWH?a}hj>W;d}ccM$HW)7VAq6Y0}rNK5^ohv%O!rvS8Ay1Yc6wT*y zuC<`pUaYcxa3C=b#28nPW?}UEwOPu2domN^3OF(~mY_N!2{#k0ulfAWLOEso-OBH|xUA9T&ny-jnvT0zlZY)NShx*6 z^Q9_cS7K8mOCvOwHR-}Ai|o2v0`{_1R5327^mUI#J-IraAhhxzYo4it6T_#v(aCpV zR;!Jga#Qig0iFvQ!3~SuL4Mg&;8{F}Je6kA*Y*)*gd+msrj)PSK=L)sHrvnbrIYBl zA$QDyNu|-8duZm172@?43tzeK6X{ark7j&aHK{=buQ$2(M^CqDJPc>=$6YKCZuf1t zEV@;*I3!TeIT;uj)So9^U4LaivA8XKsP);qEbFK3wL0DOiVj33keV&Xd-E{hQm|;3 
z9^}B*XJ)=bI^CI|FlD?vK}c0<(I(56h13LdD0Kv-IJ&u=!3qr>+^b7(x=9tO-6 z*(tPF$d`!={R!*%w2273chNSv-oWF*;o=nZ&syd2P9*S^DoVQ8^lE*`uvzC(n<1CJ z0_=0PspEt+V(w{OZ3oqwaa86v=yp2;F17@fJef6;g|8cf{H0&Cm@JpjC4M^wz)ra@ zT}t}};ZD&oP^RiUUpbFWcD|Iohsh6TCt}}y?8)gbmdmB6w^+=~bUn6f$*K~2c)UIo z$0rt#zG=BVo_;tSxcupm9EzwPF&+K+n;GTgyF_P$dUGYUPXwi9tjaabAaBw4u?6Mw zFtL|W?toSMi#Y6nv>!Byl*3)7%wd$spZI4g*)gf2vz*SCO~Bh@A!?_BxzXET{RboDwS(D@f zs1tsdU@Ujmqd5`_}seo$=KoFQYa#z;_gU@CaduvyjsjW5Z}3DiA5fNkTj zkN%N-Dujqh-$N-dsA1gDdGedg=gh3QYvGhTVbt-c^UTNZ`*vMz=e>WdGSblY> zvg`5|WC31TNPNP`Hr(v+kobK|<7Q?xl$ub=Q$)4FS&uTZAsK}yHDcKL%@^Jiyd_L9 z7D2bp3y1rmUetGVzx=HHe0N$;bv1cd2>k_BB2PzjIaNq%x}NwIhs7n>>6b0dCIhoc zo1~8CZ2;e|BFq{8G_~`NT&bS=hqVr^R_kx6veiNXS9LDalO>rmKKZIs!H19sDvuyB z(EWilh;7y%uh?`n(dvcnTxfqJDSx&Q{hV=h;%vP&3+BnR!Rav_q}`IvX2+SkoVKUa z`T3HM+vTEaMm~{2Cz3UulD1Uxwpu(Y<#=b9<`dSgpdf8s-<2?4ln~^e(o&Yb7H}p) zlBa(_@zU{I*!b{ z4S=lnZt+;Xy-|){yyyQaaWSZmfYmF{{ZOw6B>?$f%Yh28MF zdIJt6`s81;9t3nO3W`;q!=$v~bz-3SjBpbU+l!qxL>*m-b3OZJ`ox6ZOYGigINZ3A zWBoC!*g@?QLM#Q4%+$zozdk(a8XOduWbs`41e)qB^2eu}3`F|9Jd&5dWx1S#srE6i zzfU-&9dEvc2HwC+--@_kufg}jfGZFvZ@2Kvl;3Wk*7f!a+9*1ZC@3I>g> zc53Ius-J7Oc9Kty!jxpnw3}*8a|>7sBo9ve)gR_5&%Z2d)znR4RLlRO`1+pr37XY< zLpddExispuU@X?=I-SV?r948F`0<%sX7+1=#WSkq@=UV^(MlbV=Juc%VrRv5=YdhZ zW<}kcd;fX7#-kRhm}AHV>7(;8n}*@~uA`0nwOd?a7IuHF>wE%{_I6ohVtJ4k#P`;~ zAA1C#UR?(k6GC`3&*2Cx8|Q8KTW z+OmD5q0o#9arM@9#K-ERup!~$Rr24F!5cMvgin5^n8MdTVNwd?W*im!%Bx&9W_liL zO@5_N22%dcPMN7;k~N&0l{J?pU)JulqZ3cZlFOK1)dv}U&}TFLfK8u#%~+tenpE%U zdM{nandqQ1xWbkKOM0N=MW*h_6FkQ5@UYcN zogC40>vBpIW3cUPC>;$&=Vc4-oEvb4#&YNGef!H**nnk9O{dM9_VHYGEt4Ekf0(6L z?}U}mw{c3zu;yn4S-yU4!LSlOaU9WX&-N$9Qe~^BvuB&zvYLvq<=Vnx{eV!{wF<3P zv6b?{0z+1dc|*NzQr_|DNEZW(NeXj)I}+ntG~punEt{Ha?1@^^pUiV@Ktye?Uo^a0 zS0jT(FMTocvf4G>pYkTDkx06Nuf3DBQ-z7V9zOTi`cbDikfx@s(XP3;0k1V{f*du2 zJ`!a|8Fxx@JDn-pT_28T!!xu=)qij>Vxjw~Jv0!h+GV-ivMy2rx7@hY_jNiYNOQ4f z9}U(aUpHYSA^u%NtMhrWYbZ8@2ngg^HqU8Rt4$|3Ms>*Wp#AzqsjLfegCUO}>unsW z<&ix*k)O2TsG>Whac1-<%EFwjZ(}Di=OA}nT4UB#=0Oi=#$fq`mYyWEJs%inGdu%G 
z(li~>c7A$Erhy6Il*330)7PU2hspYOuW7bug&b;#&brW0tXs}Yqa|h+zvNf3Er&o? zr)#4|r=B7rPA21zH%_56T5EM}Y^7r5cEj}6n>6G~dxOCsCqLQf;Ffcb&O0*@x#nFd zlW9(|mabQ+A;34@u#F zv-qM=fRFfYvjoVd{>*fwgGH5K1Vt7cO=5%(b2+q-K|pL@M^33Rk2D}@OZY6q4_cv)|00OJe>{hv1E`%aUi377F`oK zgxMB>aOXNi{Gw4%_cW~T!j$55&{~_ImYf_o+|ehVnrJg|cpY0MDc#p{i&q4k9hi$C z;0W2eI?j75Hqr=p3`tq8mF3T-4M4aOIa2OxWH?W_oP2n?f8GYTCPA}ZG2m4LgY1khCcV3$TlvL(|_z2;9 zX|k4v``acTm|AkA$7Chd3^J(KZ8ob(UvGAuTSOauz7g)j>gM2d+cm69i=$vqF`0-W zUVUW`?AG)W@%Mv8j&_lcBYyR=-nqUx3vZl*ON8UzU2-%^so7V23fUbTJ1{C1dwKU4 z#1*X%wL?C}@wg`6MBVKdFk8^sA9u=rMUuQ(*aW(vBF>!c+rO^T>vhRUTcnES^&sT1 z6{uAAXUWB`?lg z6853oPM|@=fi$UGLmgjJz8UUdi2AKoqN93ohML)1m#3Go%A>k~`}I?9r!U>Nou_#5 z(X|eX)~;qVcpqCydK5|%$4wt&H=EN-aUopxZ;Okyo96WV*qD;NFjt4-7fcx`pZD$g zO62HL+jw0f@USrYUTa%}h!DBuTel(%oTu70klZB0Sj{BH)9RVHY{*C2q;R9|&ey-~ zoR&~k3eZ;gsf8=Ej=Z4QDOu9B-A}CEpl{Z#Ks&3Q=gW3_n(t&4SsUqz(v@knio87} zHDRlcS43RJ)2T!ff`b{OCD7!#wxsYirUxq#8aF%x1Zo!FHHjrrKGW12MK~4!bS!hc z^vVZN3DvGm5}aZDJj&smJF88S;-RM>`~w1xapSjTd5S|vxj402_@t!K)*K7+CieEU z8ul32cuxvjLkVww{V-q4E3OH!UaNx#y1E|Cmzg+lLjP-osY6d5gZgIZYf`gtvQ_9f z%69X9aCop%uIUlfYBRf`ifjH5LHRIPt2Vr9Gj3QuOU{+;p!Rmh)q$% zGhp+sRp3JiQ&xAXEvawRSUd3P$V9dFy6}u|~~idW;^j+@=en-Oa>P6;)Vi}zLIfrd9JATp4>-5^@#mH z#-Z#GijwohgCh*$SO{@~qrwUhV6GKlK@&QCJ<&?ikzZnfZ)K_t;eq^*b!vWSCJZnA zUw&DXH4w6%HIs$0Z*)3Gfwtt*IkVj3_w+6>)AHN-9C2HFoN|ES`YC-y=IxP^~UoEPY0zp)e+K|pD>{QgcINxTn^@rFxg3Q+x{iii19W8>8|W-1QSjXc+wIX@8>4joL2yh)+U;W_y#F6RmXEVszv2Qb`f zns)G{$RJsF-NBlqdOv5aUjgmf$}sq=JrTvMaOA(0p8JqF`sFL6J4JO?0` zjYnO$JJ2b7PPo{EUmAuNfgD)zv_GB#2&D#b%Q8gmVSc0k2L(!u5{i*AW^X6(d z&=#B7SoyS=GyC)cuHgT%A_Sb_d!IkFqi~sAq<^#*T<5udEPQ$Y<*7)3f}sYk<5SjZUT2?KaE+*9jwuz(@%g%(==dj4JYdok{*Fhjo+Ub zs>rqeyQHCY=rV58A2#|{R-fMM%5M31$F-iLq!Yk-d&hXwb-!1~CnZ1mgWvyw@UhUu zxQa~CjPoQzdZH*URM7#%TZY^F-)6z?kRnQa6gpVe&*&C-CD>!957^N@xW%CJ`CC+* zGMZ{=NZWmfA0d2Lemq{Zz4$S}8ZeFoNm;qc&*6nKbP647Y8CE_#E&KY#th>TbU(Mt z)EL+O_-_qtgWjUp#d&Dw#O|emhQtc!_TY-lj)q!;Gv^^go}w0; 
z&jw^be-6qD=kt6V9_1!#Zgz`9WWWvo4;w1F9j0Bygj-(7rNDf}=w95tP{2>|cJ6Me7?FQ1RSAuD5z9WyT*Zxhks| z;ZBdlY8wj7n$OYG$WRP7lZA*WRqpKW$}7=JBJ3CNhtWbND@E+|7-0hI(jcK#y)Q3r z$dMUzuI?@ix!LJ2pSjhKtamua#WyxZJ{OvX2)w`;b~y?pfDNlAC4g?4hquzPl1d2t zE}@t=bb*2Zw518-4m}Y3H|Z}{vTLz28Vz8r$y*+pJhp%q*p23oPkWQ1nK}d>t)g?` z$RgLs#)`yItc6Q=<0^^4v6Sl6XPHPYO2A)`EBG0{;(`P^k8*2n>|e>K*Z}J>nF<>D zq0`$uleS(2iP_P%U^B3!S_@!*GO6vtr8uJ#x<(O&a}G&~aeub_yCTHM=+VxNZN2+i z6|^kzwxs7Co{N-ipwC--+j!80AEBBPyy-W{c#h@?YIAkB z?o$iz?&*W~H1F0Tl`>Fe*m znyjm*4fqR_{43y|F4{FZ8kOW>``N_A1k_xm9MmW?vFG{*8c%m-`^EUjdzOfQT9~?g z5`CrAzycV`>udHbh{y^5?O9+SCWg|}y9Ykvp;|9Vv)E)9O5=mNh3R0mgaMdfIO!2O zK9Nh%h|bj_Zb2P71NznRs#HkkKlQg16JVB`-MCc8{=8*MRs{ndAsnGu9z}Q`gyVAk z@xV;bGof97aAW>-GnJ3Ob8FjfiuVhR2AFAjK%;CM)MVi!e5$w z)}*^mGOS$|zjY0-FA|vid64|Qx8ak?kWj$}{nx=6o{Vo#N@=*72SPfHMI{R1 zU$q2()?|7|+;c&vmvyrIf&}NJK#e{m{tu-W%(m6NvzF!fi$lxs|1*cyc3c9A5X{GW z92y!bI}ypWTpb^e;AwB8dg%5eJ7ieQm;TK**J7m+8X8`3`}-{$ywLn%X=xeT77XDt zMlzKviab9zlP}3!Dqi=my(`$7(eL05L%bEL1O$)Q3z6qb)HFVG10IN9>s=VvI~=&B z;oeCm6|_2^$%~2ErXCL?2GFQfNSv!Vhh}FL92wd8 zA}e5EG$RBOXaV)D1PY%Z&K3JcDH*Iy)G>?kGu(@j z*K;|}%~|XDR19g5!-svlUCJNLHAdnrk@OW6?w;BJa6H!a?e+c_==GU%hi4>#M%KqC z-)y!3X}O`(=CZxoFc?bzNi$$zLarzo{5;33UL0{vt)>8 zVbUq2Q>QNtM31_BUh8bPZ4f56y5AKE`n@MJD}KE&AEi)AV$dllkWQOX(eWte6RPEW zwp`{_AzW{mljy5T?BPtw-2ewi^Sk|s@|SBj3*SB8%Y8_zpRdqLqmXj(+1c4O>%THu zzG{nGE#CBci@;(4DHwtI3i!NiH;k^A{o*%IzbnZaLutoVQur@4>doVhJ8Q>&z4*a) z8>fB;4>_(!`Qmd8TCh(^ z)|TwR0ZjbOX0=e31Ct42Z+Zd{X0!cZ&Bj;eU0dK_)?03WjwqE()msRfcZ@98n%vgh z{^rkvW-u|KTxoT#lvqTsU6wI5rMiZfcd%u%T>I&|*eJ`tQg0B{o7qyMY%o{Ec=HE~ zoXlKz1^JEr9wh246n#R@Q=(D%wWg_=UaWH7p;)c|oy%nl!eLohPjB0`8JE>uezoCr zXmc}aB8yvA5~Q7zM#RUhPex}F5v^%4Eg|N{bZnD8sgxru4obGkXTh=`pUC8vygibzrc8?@a;&0}yc@vF_e+N}V~r$!7VWV&l2h)eU&K(Cc78F6I!m!l0J-Z{b^u z2*scm8dU48v)dJMz1ZqNvs|%z{T_yyf0&sj_Iu$q_ZR?pJ0=*;A$a6|F_C}j zn|sHC`1d$V932N|*n(Y84vt10jN^$zM^0pLDHKL#6bXQ!zR1kTql5V{`4nTLqGt!= zX~mAe8;s{49v^2lUQkzZ)Eq!Rc={;5kRQFO5y^G9KPq;-M*SQUjyT5HzXDmT-UF} 
z4zXmxf%)?JBj!5KqW(-N@A6^XtR5a7YKGTf_l~y3xt-5`9?x4$Au_x~8KP`mB}f8} zuE?>_zM1k{ES{}(wW4$kni5hgI*rh zzB#DOGumzugOb~16bjn1BUQ@P#iRH^7zy-RBIA%aqeLT{Rd=Eyv*aiGf<|zNBs;8y-A0n0n9ebuIvO!P`FavUAyl@djqXcfKqNwrwhvt82k?^!%PHj-#bb_{uC->-@HICO(kQQEPl2 z#vrw*q#grw>#X)eWM6Q}7acnMJvy)ZQO31OnBebaO*8>~-Xyl4A9EaXx83f*>}FZ^ zvho=JrJ*sHw45(YKzIJ;Y ztY~3o(w_NW(*~yF$MYu~ddL}E9$@$;j^9wn{nF@mnhuu`)mB&|8ePcJ^27JSpOvU` zF~RbNjiqF$ApA(R;H&9?Dr`Q_1UxJ4OO1WLKU4UF$!I64v>@YHFx4*>JAix&?~2j%bvlfMBJKeP|43o|mV zwK_*xuNaqFyWLloGvVO8oiNL)m1{eo)8S4*@|fxY#7Kl<%Iy;qzF&Jp{CfdP9PI{g z80vk!WY8O5Nd4I5cHVvuB=bn*@_aIjE2O^^GNL=3SE1k}+e|)WasdKoF+e|OeO-(k z4w1#ByWlVGP41_i>kMnlb@;rD^fB=9^|2}mf|pan+eVAny-0uSAx4E;IJg{4WPC%y zW0SxB?s|77eW=QO*$sP6GpX+OnEMp)#w)wt>TFUz9VY?t@N&)hQr#|FNz-~3+*s}x z^pT>~txz^gU!jy~@A!BC%D4I1(2iXz_OMU04Isy^Pzx~r3BjF``g^vB2(vS6h19Wq@!)n@K-yPeYoDUFY-wMoY&1ZVB9h^ zU^th~othMs6A_(d8ebsmmB#vx;Fm0$UR>14?=mH{I0kqoJ@=v9-WtWO7(PscaV1En zqMs?-#S=K77|zcm(dk=J<$HmpWf5?QNZHmr-5y}ZXbNT=C);>xy{^^5wee-c$EUmB zgJT^(tUJl0lJN`3f~%A|J>RVitgzC}Rch$0D&1>3WDYWCY3W)}IPR27R@A>(25{UV zP-bqExxe)~TPm|>B+@^pWf*GOsuseT6Y{VZxK!_8^i5EaJ-E0jJYHcgo!F&uVbx05 z8I{2tDTK(A*MAGL5)(nCPkOm6d`h5K2;@tCMJ+c8f>KX=4-}ubI0wz+O;B)?>~*}% zgcuyicd->7wt4yFH6ysyCA#YMo{mKFTNem)378h=DP0RQiF-k0_tGg z#Pu~HY5cg>aVi<;{y5%v<2^3HU2i3gin`6>anO2<#&Gy-|Gd!myjk;c^bU**%Dxie z6xB$lajCs((Q3EE9yVj<9EwE!l*VIIRrdM!?szB>nyG(SfWs{*7FiJ|gwK7iXl10j zZ&lJkSSlTTu0ksaq}HmOLLOok`$B&awC9=0FiX+B>@8)Rgq+)8wOZ@cop7i#Cr{)7 zgNP|_Ouv(?JN2`hr@J*Y^&LsvCS}bo#AO+|@mGyTi6@jVlN&92ezh=e8?o>YOfA$z zMLHg=S%j`bEWq_70_-6wHy4+tW6;ATI&h<3EPozFR)gu0m+ABP^rMmDlK|i1#!35_ zU%h7SMp{N+nH0}R`xyGup~q`JeII!t0|`~z^O+QG-*wW{t(U*_s06hM8n4?;4&=*r zooUQ4os0c(Eb=Rp(FnDkLQ{xyTf9FeO%hGxZmvi~fh+B=AxEknp$x1(#UH*m^I5F! 
zIWYLlcbu-E0PK3&kBxs`r+Y2S#$y8{kNhr@Ntq-sh$d~4wSNnXUMsZm3f+H}LTp`P z6!`O=9=Y2KIxK=L?;}fyh&*EC3QDDYmM2rvZ{#2M=I{$V62ZYN>rstB$i>()$QU=B zwnEMpuX?B2(K{Ywj+@_`IvE;HM{)I@YkSQWNOed0M_yxL@NG0Z1<%`<6^NGKPHHEY$c$;F)A;=OB{Dc)xnzAaOL^SIVVI_0 z#o1s|zixY(liW}^jndG&kiT@D#q@{GJ8ybS_QY``>R=z`B5$ggsol!9vYncqG)+sm zq%rMFyPnpw&+(vOW2=CX{4yre=U=(V@y<2zcXDgAo$SB0j)#fVNalt_+miF!uO~im zb;W_=@H(_EklJ~UFz;#>W71WNUiKFQhUrSK2zc%NjmKSsxT*{J7-M(go)n;c(_IS@e1V+IPRmS5hC*`L^TjU`P5(wEtlL%Bzq)=OisMVKm`a-(n=Zx;m&*j-y}h zIlVE&gJAbnmoE5#f{a6zU61Ji6$z5T@7n;7c;IGiEsOQ{yIWL`c*;E4v^y+tP^DTO zk1CQYqba{UF6|){644z;0s`%Ko6grnHKmSZg(f}tpK(0AGNcoPhfr&o=!V)qjS?j9l(ee!PQVqP#e?i~rxl}Dr6YY6g1TEer-4{R9Pjbm1qw=gq8ti z!F2=`IrfIWE#gb-UK&txB9JX!d(c8r@F@+O{^b?9Bwz<9-tQxL?&|8*tEyISkW6sIOxeLbz$GwfOI?ge>N|Bq)Y6-;UDuR_ z=A7=%=KT^XhzT2xxj|~(vhOO9=kmQYEmrd|p01v59g_nKPWU|VYXPg3O67E8-nYc= zJLemYNo^QX?>{+AsnbZwE_6qz!V65RG2SobB^HS%${i)}hCFK+5>S|fa{@Eg!glGT z7OK44+QD%dKZyT5L=OtWCg*ewbw1}>pjSx=f&@Yrh{U}Y`8@k~co3UnJLzz3Gw`_p z#be7LOL?Ei8^XF>H)Bjzl*n=x0iTmJDSKnQLvpcjeR)?)vo$;iGcb?MGMxc#M__v4 z8FoR)*Ll!W%-S?>nA%Y~{`R6P0&+m~vQ|B0@?f}v(H&!vYp$tx~6 zI!p1Rcu~Sxnn4IG-<(mkP#RKo5jk4x<~7WE&~y0wSx@Z>YapV)rahz39$cmoI$u(4 zqrZ|C`0m_@Ds%+ZdlsY`i_P!-`5Kjgl|E_~3td5w-F=c(30m5mvqKLk{f#pslfir~ zv+!>u+gFB6?*%0piC7TpkB^B;Mbg@F3Nwo7a3zQpwWKr?j%>TcEth!leXWA4QN~U~ z^Mj3?s!M;f9|e1CZR_X_o~at(YM$Ac3b*s*oYHC)^m9CuZS1&O4XnCU4z0K2=4(G< zctO$OC^Ot0XSJS1jDI4ltfn}O`Dx0AR1*a)Bz`IO`ZpB~8V;3wMUXa{7heA-;z9wF zD@g7)vNU%yHUo`vel2Ldp#v`0B$rgBUNWG$C?DQG-i!#lKW!%x*=wC$CxXr)*IN^Z z4nu%%FBiXh7+-91`0)nx->5SGfZx4N)XjTc<{QIS0iDwlR%L+drrU74;%4r4*MA~; z%qfBe(AeJ;weS6(g*`=myjZ^~TRq-`7U%kPaDwz*VK8~ch7e6#c;3&f?ijOxW zX-=tn;o?_4)%mJ&9giM;O94e;Lq5tTGSu_bYt%zzqHQ z^J8*V1bTQdAGeS0@`vy7jUe~C4HACh(KR)r(JV~p_m{JWRZDm0EJHKjxm{@-yHBqPLV zbUA!GbQ@G#8@t5KH3a^tmCl9vPxq}bZ0(#j5X6Twv1-us!tw^$mHhz-!J2X9Hx4G{ za%n8LqV;mgOT?`3|D}+H;J#+*Zv?$4YGOFYeJ6h*`{S;@)59{1OTHnR3ixrCXeLH6 z*wd%6E$x|6gn@&Hcb!$O;vzO6eVQkJ{#Y3sYi1`y&V#?oaPT+c3SInzuCP>GVr$1M 
z36HU7n;UbzXfa`@@PI$>n8SVbAn%5j{>g>5q!Pw|(oKGgM^~0(v(_xRJD!nimI3)S zDjW_swen-q@N+39^CM~OB&9+4qdGQuq3cP9`^nHF5$`4QBWBir(jNw~VGDev^P4`z z-iY03XLk)@-dv=y37ETb|(vs>e=bJC3IQ3D~e7-^(n^IU# z7#~<_Itohvx*9H7gJH=wgNWAe$b9@U>%)I`76sUpX|c8I0OieX-Ite-FAkj?HtZ&rQ!MDxs+2=|*^ zX+Eju{iv9#=1n&!>*KQD`ax2T%zo-ZxV*5m0``~-?cWwPyUa7Cp zy6ofOkHeQt87=l#(GQ^cAA{O(p98TuTwGGA_Y(B#3*s@dVgHvIGsnJy`|EJX@X(no z9EmbfDToHbqF(>kl=zT31^M~RbN=t7npeJ`DWQ&A`aIX}KP2_v!S=nPfd2pg%jctv zh|`p|Esf2CCpn>kUv_eIytFayfRleW*FQE3gq4IN8X|DqC_2WJfbIZ;%5YK$@H@qq zeMA2uAgEihf@M-1S)4~}`>(P7ohv4&@{-|qU+w?rfzxE6B1q%y-g|t1(_Prf){l;a zMPV(YYC}~LJf3?wY~zf zS-JxF7AktP@^CzVfY&QnNyN@j04Bwczl%)*)pwdVQH=jl?dj@mAb(?i$gK7KkGY&* zMS|VOSu@ypgoX*!3=9dVRvJaDAJis})yzd-TPlrYyeK~~+r6|T^O?^JT+UX56zhaI zYPpYyuUsz-DOb|?K@qa+%_U7NE;c$@ogv}H-XjCwrgHKv_l}7nwQ1f-bHVkWA0+aB z1{DiV*732Y)`Q&kA5>H2%wMF{tjmb7AK7duw0WDzv*p=}nULlF z1q`(0PG0vcGVY zj2H<{o3pnf>*w3c6gE*QRTb;Px&veDkxnE5T9n}e%F~%HU`LD@E5&a-n!fY#W@X>+ z!ucwX{sE3odb^njF8){Fa%t0X@kvQbov&{epkg36DfsEGE)FKQ;OwJkuFt*o{bHr^ z%Lmu_)h__Ae^0;^nSw!MUD0)jtqbWU&WPS`q)c3OEvmxnBXzTnY@7F9|KnzOU=!s} z^K5a^dymQ ze>`(CoG|H1L7#3!j<;dE>PeP|0N=pTSEGtCfexAMv&j~^XQCF_c3T6U_~Pj}ZnR{f zaBf~{n4>NkxV+{0@yTprpOp9pC$ej6?dR%?esI|#=QbVp$B#);acpC4cZR2LRiLF+ zvhE(gYML=TSV!>4E`O%&pOhmzZm;OXpN@}RT6`jbkoXwmA8YZ*b4e&f2l-5wz68-# z|IKNLl7YpiTBoS>;?pdrdfFe|xQ5DY?S<#fL>TKD|Jui^Y?zcKm@dZu22GKQqDJJ5 zu2s{wzoeCAX!qSJsn2&KEE?yhy29)W(#lJk3~g;QS_*<}v0id9aKX%tl@9DrFp4R( zC5RMJ@L&w?>c)NS5s?6W@!;?tS?h5=D8#@clpSeL7d*aim&z{%k!cK2eSC!Z>G0hL zuT3^u|Mt2y!r3UDK!X=#vg!TpyX^QcjlF31i0W8a4G%FtP{ZT$&)Is?2DK*qpE!PH z(GKTZ&Vb7iqo+IXjRjmbTeY9_&(XY;Bl8V(>v^Ux#Ob8de zn;IK0X&@ghR_p>Qx}w_$g)Io5(~6C$hZqYG`-k!38ZmrFieNn&5eb9g}#=$_G|6V6J>tU0$ zifmd=AiU(dMG=qP6x{)lb{_cKirtb)Rw`ab$tIsW{pztsIVvziP-j92d3%AX^>nau zaJk{tn{)v|rxdgtIgvG`A@DsNQv9Ja)v4zj6%yKcpHYrh2~DTE3}2a&V*wMG!qlBT z&R^MdU4Qu0riAaIY8Y>eZV7PAE}|tHt8I&|ZhN^Z5x**_9z-?UTSVvEdmZ*JvxFW6+09I9Ls9@Vj_F&N&~i|VW? 
zMRReZ^Ez&N_}WSPUMtklEev3UnIJ{nFO`~+%Yzw#)_HLxDNL8xnO+?LD;MTcm0Q=6 z?phX0Ft~Jc(@K}`XuD(9+x}&r$&3I9#cMyoV%>}6ghl4_Us`efs22}Pt55pV#iq-% zVnM%x@l>uhh5PiMZR@sad5SD`^2K?|a5!jXjc+(1lhD0OBM&E#S} z^Ta1f=gG_dSn(;-0)%sWPr`1OC(dU=0ZEijm;j_)M?Y$oi~%#tv=6tCiEJ~Vde$; z?s@6NK03Kw%hUplh^ClH6o>viYG&8+m2_jV={fE(WeZ;RFBVVg&Rgo5Lz)rFJtD&& z{tz5mu{u?UL~>x3NE0s21eYYWt*T9$A6PrUolRmX`mY{riD4(xjRtR zwLc}cl^z|#rIKuVSc-4K$GdhMf7m%gU>ZVx}Jt=~;D>^DIBqoCf*=lopyG({(1_{zDCLg$ov# z+r}XDw|6>hUMPCinU_Pe#6n|(P5GOLEQwc|okLbuT4}PPh907}F!07jrBYOT6E-K; zHMfR#>6$u(&%CpkihJ%7CNtS8VbbokbYPHKu6`YL?%S)1 z?vh^PYSwcjUwX&H?;Tj&^PSd0pEH@2Zfz8*#HYqZ6(!0c%H&q`q)c&0X0|bYUF(=B zJDwu&NHmmV^J`xQH)Tdx#xd{06%4vGGsfKgbYtWaR&0CKK?z{_11PpIp!qN`Te=V1abvZT^n@9iz-efp^wRM1yV|C=M-~I zAot_EqZk{i>4&j(GH0m)=CygJb${zQlx|HJNm76E=$~nSD$T zLn7l9SlxF}xApWMN}3$+*9H2m^+=(kiiSR@<^;#p^kBM_ETuV3^&ivPQh~*7O*B(MdXq<0v0tjgdp}=3cG5Tuj3a2|fN*JnIBiUK_3!UE% zTKE*1$kN7HMKh#|j3&HkHj<6LWH-yoIg{6nR#&tw?alwwO}J;%3|u)cSr7Kdl3c#M zp|EHu-xc}%kLf_CiywATu5-xihn*PIKe|Oj{i9;W86--6wSFG7!{%8mKk^Dt%VXDB zV5HCCzPlKM?YO*!QsXMbvE;%X=GSB36sBadyrEEQ@1C=?!DinG+!wL>?Q}^&^q*=J z25RUTI8j$GjRb0{VDz)kOMdYjKLL0iZLa5+pG=1GLmOwNlc`Q4_;k2L@NBA5y+rm{ z1*VYmJ}gy@0LYy=$-EHxUV=aHZ?~+iwZ}~`+M!PQBiz#Pj@Iv2*5T%%qRFE*dr;e< zu#9+$hTs-@xST3{`$iGZpy}Ik`W0IKSU;MuwV=)*`SD|s*4^;8TGiLh4iDVGRRh2) zo=?^9D-$y_hwy_|l<_e^8OQjFUNPL;{I?eu1_%M zv3m&rKT8$$;~{cWdUb%R1jh!7eH`#d&D=r zW_9(;<(l}HQ|Z&s_?2p<_36dNhY_^p-4@=KqflLdudB_s5cX;YCL`7)j!7D@x%Jla0H@*p?!%)u919dE9XEFje_Qm5c2 zjAqYmSm3IW*H4%)zQx*wF!7ZN@!j3RdkfY6%^JErV)RAU{{346CIGJov`z2he5?Vq z!>4KIqigvQ!!WHil5P_uf$>SZ)NeSBR$j$&_5&%YF1N=H1u=J3YFl;6PJ{V83l(=r z$$Z(!{7XW!>g}n+&cx?`*w}&ZnfvINi6h z?$?PQ_gLK9yyzv(WpP=SHDBCGX+>Ua<{cwz0!sf99t)8n#ECeSZ zK?v9m54TIdl}q}Q5_XX&8SWp%haQkO;vr)Lq7D~BWOoQ^6ezsVQ5I^+UnbVvIz{cA zynd6B5eq3|iz@U6wcW0HFisWikC~P9gE-pShEhGLs0#4I&BbKHzV!Dd^safl`Fe!? 
zW?!Y_vg5D}`RL^6n(;=EQrfQ=30se&p}`|S3u5n*%oPZueHJnyc9dV^RL9rkZ*neDs_DI{(^;j(clRx z2IQ5|DBrTHNGXxQhb4N<<7g;!Dre}G(62P2cdRq{AX|0wF4geIE~KF*6Vn<1GE-Z> z6+OhN2j3f~OgKW6o)Yigyp`F3mcSJY6SKVojf0@JDa7De1B>nQ_Y)T zAAcVkb15p}x$67JZWK#Ip^><8VZZ!uV;$UW*Gnw@9PP!t=ER(4tYewC^>j5JySUAx z&x|SLZMm^n`Z;Y550Hb_PY=OmTIV7p^ivx>59(3x<;5e9sAc@dD1QsGZ4JGMleUV{ z(gxLSGJOfG!~z+bx+?AKwo$3vl$y+am8$e90Dc<7vn*fpdf<|h{bDxBP)(+C7erP% zPYrC9Ih?&wbdh)xV?Fhsr;TMZkVvfTtu-;BL9Rzw+zqX>#$Dof-g>pzZ3TzS((!ap zZV9|8x<@;Kh(z+=luhS-6?@;moX*CR>3LP(+>j-da($!;*Y*1LC*S_ByIRL*Es)Cd zmoIoU`xda4>j$NlWq3>KNar7}35p*GZ#hA{0-g^|e6p!yrMHhlNgDM=80JJ-Z@i9% z63u1=ENR(y41Bq-Pf{^*sb)?~6J{8|OJwPs-ap*0No0OM6={^#_f>z@Y4Z>|<>*>7 z{mpp`>;d@W9-~_-lP2HsA{3fw3HFrDGhVG~)u{T+7Ta1ZmEk7KWDyip?lPL_H!ID!6d zxS}C!NK#U41OEDJ0I4BA&R}Ip$08E;1(~S0_=Vpn*8%X%`#DnJ*A@nDnRQCr2Q(^q z0JqBtt7SqO7k7*RGRiKfr=R{~ld*}B(qhH{X8V=pwaO{~bPL{zo`prB}%Mcd};J_xZ`(1fN=}4a=WjKR8$6R#sx#+WG`l>0R%Vc5GD&x8wPJ zN8}9rP9^%!RJy}em}-AOm`|kwxZj0t^4=?8)6#N9gDbu~79z{S3C{pL2rHMi8YT*8 z*~YSrCnLJ*1$aJ5%W~Hl?$mQFE?)V(c!(yW)EPmDzVjCq3l=AKia`iy}zi0yTckDm>|Dgur`@t z2FdpNzMoNATFT6k+G%2#^j@?^jE4UiGccHR8wp5G&x0J=uifIj!q-Yeo@ugAfDBsv zY$b#&#Qs&*|1?JDMTRGkJ+?f+u|1w2sxuQ^8!AmwD+WU8KrsdP@7DpB)KR)blJZM2;cezHlypjvb^dN>d|m8cFFX;`+Hjq#OPo zS`JV&fi!Q=i8aZR+NNryZUP*U3`+jY31NTB`QlY%l3~$3u{w*mA}|{%YzbBBXZkZ&EPNv7TA%0Mu#^aT zBUex+fuU7qmb!D`ObDDIA8IR(SV?chsH-88E=F5`himw=A^vby_a5cu?3><9Ja0GM>wfBo ze-|{42qHsV7$979fw+%1I-~m`Eu@TwngvbcEuI!QB=WnJbT`u=x_Rv9@GD~nAf@s> zwe;?Im8nMo4BNSavW)G)z+`IkK5Ag{W2nOpJaycjAz?)w7`|Bz47&H|4Nc zl9lc>*}%GnIa|Mof702~lzy5b=my9$RGb`n%qp92k>v?HAZ`$#?rO4^&)2L?gO@H% zlah4RVDM!8xjVB-o`y27PEw`6>=wf!-$4rsSyJ3{DgeB`0u!5M90~5Xu=j??f4t)R zjb7P#hPOo+5-rZaA)iGf>5jM9zg~k6f^YEJzssi9sTO!VsOORwn<2jE{Tf=pP%Mgw z_-B5s(Fa2yl_Lft(b)HH%0j!F(b}G2#>iP=Vp*&AZfUa_&v*lSu7`62;6Tp<;3y@n50!V--O z7C`;TeOIdx1ZVo|hWPqT+GqCoo?hxCYMnDm=ERRZ@=-kEzK7u4Ow3;NcEV>4@x|Wp zS)*|+jpL-YCx2Zf+*Fngg044)%x=RXNAg{Yr<|)&nXC|~_9^@)AUs^%M%m(fo@WM? 
z2AyU1(X3=HM?FRqLM~tQIpo6~$DO*M)=zipIBP1K9M5F1gdm^0Q{W75!Na8UeKmfb z3#G0TX;Q|_4My2(lf?9v8w22&qSJeCe8i;Q_3|sk%v3NHTrjuHBfa zuewz1z-TN*Fhm8qi`D74*qv`boI_(_OEURb^d?71ijDv_r;C;#LMarwqY0Ct`GzHp zZ7r_r)IWB96*x809Z5>@4%Y#wPx{XH)WHJN=LL-kFEf?nQuHf{u^wE{G;OXnoGDt{ zAkA+6DUaE!9qiB9st6oFLVcTbb)JhOWSh%nZE+lUm%Et20*%oV5!Vtt0! zD&5|R5xi8k^e>u1?fyPx&&gCW?TJgaS#Su!Z$bJ!7KW%v?PWT3aqt%u&hNyP{n6mS zOhP|&rI9S>SoLD-Vf@8Ps)L?hngx7R7BQ4Oo|bgos>j4>Q{U9c-~uA5E}=r7Wt1}R ziU=}^OZA6R$owx^g3e~$?b^SQ!q?3lSKY7F0I0v;J+?kB!@AKES2g&UUABl`YDgqW zL<&4HU;5V{z5;5Q>bZXM^6R%e?~XL@9h8*vuuI49+}$Z8^d@u%E_WX#W_BpI76_O$ zCjK%s+|z6)Q5K+*F0&&)lSOm1p|&8+^2I)3A7CD;z9{ z0Tm#`^fVZ^o#msTU-!GWNA|hO=)e*9LWtV^y-y_D%Xeb5lIRh3<&s+Sgs_T)%Y<3B z?Ezz3B)$`dy!~az!71|~Jb$e%>o!&Il%>^a1Rj<30``TUOVQw$@%=^%W0;VTOr>_E zZh@QZ?e#%$(`Vt`o)>9kH5-7=4YGJ=*Mz|L7wXnzi#eeCW#|zS%m;-NBu-sv6Ns;C zAB;^90_uCc_@Prva1qC*E!pi2a!%+}&m##Y&Uim{(onC~G>{D>a5T=a9S(mqj z`or=xV-^$ z)`QxbNv(T{atJpkB-wIg`U@zMm*n-)dfpl%JpfDexO@SFbZY%J?FO@KL*|=IB zV=Zo1-j(HO`?%bXS=~@Mi%7SY+|u@nm6_?<|1_ME^!wxW&WldcbGEFR=vn=mR&zXm zQ`HM*MIT@_VX2?5XyBb2T}@CCQ>>)&C9lIg@Z>#6adC7gttqkTvCmEB^seW+sb(mR ztBcB~!>Y-qli(IM;I)I~DSwSK7-MyW4|0ZJ% zdh6&BK&9Y%&|3$V&5uBNE1H;24~6rWKb{&Y4pGr}_}!J_)G4zIRdNUve9%O~9l#+Z z4va9wSjvU}j(C&itd;175>Y4i_S3@qgH}1S2WRS5(Rpe2SSRW24{5l8w*L1YjP~f) z%nrYbA6C{4F$ZMy!aclEIU6b^^V{bt~L6t{d1h|IE8 z&8`E!FSsMo7>RvJYwPxWz7W0B{`0JhA6K&E>sL7RfRc?u#Nd9+-SSmrcj(5=+fy`> z@d&;T^xlj>$`&!1C5{DS(yo;7QK`HiM$8l6TK)c!9&pTi}fCu_nPl?0m z%j*3p-J(CB&{Z>jDG>UopWyB|}Ggp!A(|>YsBO zA?$n&=Kg_@&K!xHP+z*9dmj+`W#~KqY*E9Xpum0jaB##48RmoSGNMucY4%nuvNcGJ z$S)#W4&+-;0X2Q-&qF&o0r$0&b|RN?o&heP6G$F~drh7%_S|{ANNOwC5sO-44f-JY z_g4c1-KfdT0?5D@0T{ic(*7t^wg)f!VeFqe7+^yj$iPOdL zcVAk8^B++D+t$#ZwuW-ay|7?~vqmunIXe<+Dv8J{gKnckeK>lqy9C7P5iNSQ2`(=H z%wJw?1$`%IIrzXqelh>>KG~Gn5Dq*g zMiyTDDXvj<7uGOf#YoKsod~F>hYlGQ?8o)ESl@&ZY-ryL?5p`fZ-5U&&{pu)o$P!1%9-qVtE7B3-eqA_RaT z7)SSy%?M{5VH7EI#KEKC%UV}hAN+z!4v3BcKw}QstL`_26{`q zFuM>E7<1jf4=Y9CD}rz{nOZrXa~0+|*8%YDnk~>y#9W;B$yEj}IYa57=7w2EHkPduvAw)#qX{pZn 
zf#B)s>2J3V4i2;+9lp{;ObiU6hzPl1nXbOoV5N#~-7Sa|$KyB7i-l`K(zWd2Ig6)} zr~3w1?26TO?&~7)O5fQOYS3KCo5}ppT^ed?O%-sCipNDMAe~O_hTM$hv2;E!AYdde zIa&yc1q+>tn-z_E?F@TdHTgWZD4Jc*KO0xAD`fkTvzeAl6f(5Y{utA&X<94&sTOvJ zfJQ2hr1nc?8LIAC7{W{2>d6&Rp*_O3<12{;7dfu&QQR=|C6S>DNiN~MwSM#9msF3~ znviJ@IW5n!ZM#Sz$;yNRzq6Dv%h=|oW^(f_k>Cb(B;A;7s&o2KWeDlr!+hS=6|2;i zH`rz&U^V;QhnDCDlQA+W6Nfqw=v(N+I`Z|Y# zWncdRg>>%htwYVgz%+Mk3~!n6pKGouU>KmfesWH_3H{Pp(rrOdc#5Js;H`FBiXNcN zuEV14z*F{#hT#PB`j{guh22Jk!IQMVSBq(%L@wT6{~w3Y_HC|Rlo$6mJ?#} zbaQ5%S&DG6Uvx9+wv?ORrnlLl?d)UwOcB~RwR_Ukk0AMeQFM$ykM zilvb3cIP}_7ep6iF6VTeUti~&jHN_3Bqk=G#6OF8-Cu2!;y|W-KG?tc;~9ND;h^dI zLN0P1Nz_NQj#q<1*ZtnV@*cvP&frO~e$^khOfRw&(pEu@w<5&xLD64Jck2nt9(c%9CMGQ%DqE|pVwy#pvN!~YwMGK-X%FP6Zp9@90}-^jY`31nfhuO zznu@KbY&mys;t|Bc&AXZXJ@f5uC8{5^gqSp#cnnxNyjB*$YEd%%o?esuM2eXPMr?I z_BBC8xh>dy+|Qyl;4Xs?iW`4O6u#eQR`w3J3UmAE!QYwfF1(~V zTWz1;Iqe`F4^G8~Yvc1-6J(?PA$(g4*Z8z$L!x1@Hl{WU6h70 z?i~X#BYqheVn)InS`i zs}$H?m%y9IROxo@@Ffp(yXt>)yHQl_g85h@m%&*!ykA{^V#_gmu=Wo|;dlFX7 zSis}HdAwoXsA~Z~tICL}ci}SO#iAl0d-$cH^7csGa#`ucZj32(CT8s^>(c;MhIl+bA(4zY@2f7$xR{u- zaYH5kw7Sm&ktD1Z#0(eJbEOHzmv(kvSgm@A4|qWKd>&|dP;ukQ;mi)TU6<3PZrHBR z7La0|r@Chsr_rkkXn3Yr$IOts)2m1)m&#I%`S(k%(Hvyv=sn8DZm(Sr92i7I)wiqB zS#qQnj(P$yb`J@pMQeXlNRwT9^ z7kYn>M$`w$B?x$i+6hyw)zHUW@H?ABQjZJDrK<@5t`>umgqaB7z6TcE_#R{dB}Vjx z`trTT) z**N>mmUr&A#8Lyl+rK1|o&6%2?ngCzJR3DNpT8M%+?Gl^Y0Mf&1aFl;8Rl2J^MMD= zju$yST}e^&g-|}r)|vIgyNcViden5H0f!=a3nV<8f%$e=rv*A~2vfs20!4-i&wG44 z?!d!q2>79E=@M9cYOjU3he5**Q%KO!=f2UGX!aE3bk%+7Kts!w4Ed#!ogB$0G$C+H znsRw$)z%&#a)~~BlEj(;3BVtv)LKk%9-cJ^>2JDY*h=*wN>M{GMU@!Y#@o>`=3+Z_ zjVdZ>>sa)cdXFe-@(cm0`6uS+=lnOkObVmB5?rPSxj@{ zic`2?#3N1h^oW-RMnP~Ntj7+vm(*&UNp>WyPpqX<-_(Al7kW?)cN6U7>W)2ty0>ZT z48)!*RN>Twh#FfaU+MwgBnu)vz>jkf+ah;H{j6!Yxh%;gYlbq&YPCx}9>KD)r0Q&c zZ_liLV*UM)i6fxdlPiJE9xq;!9FC~XPN>NijBYW63oZ$ZHx(%zSpqM)=4zD#x=uGU&! 
z@)_IJ1+%lSX?a(&GJvyma3N1a&jhl-w&N-4*R3=%anR+OA>QC z8fhLkgG_H3mTH`kJGhtwG+J-h5N7Wz=PRf3hLpeY-9lcnn%kjwOwB;Q^cCXufYR5G z|6D5;G?m*wNy+|tp~kqjT6+=WAHd2ATfXd=BP7y)W^*LzXM?;JC9?2EHbo%9`CZ5S zXMrNexVrZuLE*~nT+mw>3X+g_X)@}VBu+?l;_O5`vQW=Re}j!25H(xx3J76QW#&PS zJ*84e8k|rCn0z>r8GfxD`=qKAX;Y2gf__=>Fs7FUPEwutgMb)+1ZNCS+bpLV>g4)x z`+1GnTy39c_bb=#EgG&ni)VxWQ}DP)W6!%4p=3CNcOb{#>pHnXuTmj<4 zhs^Y`x13F$XkEJ_ti~nv&A@LfnCaZXfi13RWX=W@l%A9`w}7(&p?4C*~P;Z3Qkn zm(97QJwINe;UadMFHGt~FQ;($qg3}T1OA$@N*ibx^5dN;+ll^jq%#eh#cWbKDY5y? zjx07NrZ7}4iE7ntm*QIF*WqDL$>Q2k624%I^Kn)Jz2?*iV_WwNS7wf~Vj<1_m3a!k zS5OV#T9X3;kHoz~;&}QcBB!sf?-9%F3eCFX{!%=!?C@;4jfN5=lVppK`h}G#q3doe zm0j^_(Tv-1@74T#VES*==|u;4>ZJoVXwI~YJfPn3XERmNFttppKbzJBKG$HxpOZ}% z1?FD&GxXxXXNiX!Yqix&15!8-Oz`seQ#oZ++IKJm-+1hzpRBoE_G1gV%N94fh2cTW z^?Arv!ij@alN~Q6O!^}Dob0LaADGG!uwG&ZA{E|0G|m=b6a^$(Hq8$v`XhkWB?YK1 z!600YE%q6@X6N%Jkozj5*PoLi#v_|VaL$8ZcAconcitdis9V7Tw+w7ya1P<^r(6cW< zvd7_cg}tdez5C=VkNwz_l`kS(^~UTD{Bl=%U%|mZ7du3%sh(=o&}?~pUmo7zEHH~$ z6mJ@a&I>EVdlj=*n>$~x+L8Z^cjG;I5MPeohy&Y)Y4mF6_4Co08{Kr4N;S9(pT(pm ziz%~R2Nb0W)$U?^+;xLqZH!ZAHZ`sO?xxG(<@EJE@Y?2T&W~|%A5@H);jMZ7$?IxE z3umL)cqG*Zo9_-~;Zc5Piqxkp+eO1Lfb@Lk^MYV1-0{Ty^Dx`h znJkX533wfe8V>XN3A#;gP^90ekU=u@O4a6)gbxG1C7pkMr%X?$%h)bwhEIl0j^4L$ z?y`NjF;;mNdC<8T>hb;#;T=i2XC*_G7>1;cmNnRDVa70Bprh$jqfD{ei)2*sF-`r_ zh(UwUW((5cQpgM=+Gss032TXBX4aPzf zepx5EnNykno`Rkwb85K27Zi5H=ZTwhYDK~4&J&=B%SV53uBJSFi|lfGi#2yV36FC? 
z?)W!Ve$n&uAm=g_SlJoATfWXN+y$y98CIu5a5#I-B>6v#i^4S0WA(jh3e%kqDpyz{ zH;!3P(44ai{tmFo${pSZ@2l=Krl-h1?CyI$Nl|9yB^+n>7q`0x0PZTi%nmwb=}Tuc zYYO#VQd!UJyLPIC7!!LO4DZ>bHy8`()(f+c0oRLuIip@A?t!Ta^sno;kc62CpwI3^ z&VzR3?ztg?c@MYx$9D$0$w*c?=^``^CuLnAxzopx%=TQ2+!KXR9#uUoVA8?Uu!)IY zILLiYiC~Z_*pM?NCAK!!z8TVDn#zrUa9@pelCwC`CK z(s=2ea{0?=zr%*FFaXYDpq5DnW5fCn@eCJCC=SP;1ed|*k>U-wuWPa{F=SfyL+3eM zZV%-JsovsLb#FnLeGexzmrP zHX9GYq+T-8-oG=_A?!Fs(DNnWh{!!2!ZeqasOue0B}bG$v#mjWsUN~!^EyyHsae6-0k*U9d~BfwGRanDmz!>v`@xiu8G)Dg z&nyH?wpp0A8ObaoR?izf;xse$i8}6zbLV3XkQq&zR-xcXA|;5v!A%dEz%(>9rB9z> zbMT5+SfBBE$0LRPSPq9v?y$E^W1`Bu%MEZh@)l5K`-Sgu@^x$b_bycJ5pvBB3YbW< zNT?>JHvVLh4dJg5)LnF*yv7Kw6J~uOBpwfMruz0$V+WV*yx_5Ll1e(&?o+y~0;w@w z@%_-KL_X*r+jw+XLxua*2%$W*_Ro0{DOt7(QbT#oNs3X|M^R%j69!@Y1L0S&zt_}R z&}J;LmJnsU>62;q;~TDdSzj%3unN>Cd>J$SB%k#9<17FC!)j-eQElzjaKCW#hXsUE zN1alH*x5&<`Q1(g4Atd?t(^mtcbwVV4KBRMBeQ*TDyuA$Me0)Okw{DFE^BFqlX6Xs zMV1qp5U|~lu3V%ORGG0uN*s4g~Kg+5o{_I@l1%(D>bwKHt<2F)IyC1!G z10@oS1DpjMTfrbX58@@t%BSD!qG=*;5a@8R*c^ZDZIH7jr9W=unWexkaG0E|M5>n` zbphgORB>9Wh#gU8J`?pb2Z(4992s4v(F8FfsbR+94q>5|b6~=vt=MlVNM)l)9D|{UwJME5-t;LXH+4p* zuDA)bp0Q<6#P0*52ao#dnKCSLGu-_j&fYSvs_ly!mhO;L8j+Gx>2B%nmhSGBmPWe! 
z(4EpHAl==PhwhGNqu2Y~=l^>@y&uJW_E~4`wbq_<%rV9s*sUDS;C*rq^Gmn!`#9Z$ zzl}t*Xkv)uUOqSO?S2GzjsWx{fP%X*=k?LUw88OwhzmJ9VKS%h>`acrZBsMp9_!Nv zEDYysz{C*?UScf}e#)C`LBvYMg8C>)ayg$AujVdOt>O(H*@SUAq)vV%@hhmGo^M`R=0PeMT6Q$Ckr|@#&BdD@vZ-Alg zXP(}K$#luwM%QqNO^Jx8abl<`I46?lEi~S>$#QRh@C5-9L3#5n27^O$l;R0L9lfn$X%A3_hCm5$vZ2~ZK zsHeI({M~-fWoKUr%Tql@cH%~)n$B$Ns@rD;q>G05!^S-#oO6#b(u&&kDr`vdq%YU2 zu^VIGTK@&iGoT-_i3t=YEB0zA7D*VGh@bLo^mqIWDTOEYDh-Keau8VGd+v>cXfD7L z2{Y4bMiRDtWYB+r;(uxZ{(PkHSSi8kq_^@$p$__O=|IQW>etIb&DZJlcNnV1nQ2%m4ke zKxQlkIz)afYN~_AKM49im^{3X?{h`qr%=RyuKqo4S@Gw4iNi@Z(*1XIz)<=40URI8 z;El-NA^rU*73le1ASlWu9LV1Y&c8n_lLRP-Ab$Kt_W%AFs0qkF*969WVyu2Y+yC5A zHVWX&z$E>A`R`5snND$P;9lb3cCq^Z{ryy;KvBR~a^vftdc)sgp+kaVfqQ`*R&y8r z_xrKl0>un|3Ok|yofdg<;9l|^HshB6`~8T|Q%Qo!{^ozDg_<0=7d0UpxbAc$ORxqDEd5Q zv@1iWE^r`^C;rY*NFYQgjqU85)?|c%&oW!kC@a3Uud1S=OyOmJ9cbx9vAJb{Taud? z5tiZt$80{$C3ApFpPCN^cR6uL$Vtt}NH)UW!VsRql*LK0R9MyW&|@B(nrcxe8me4s zGB|s(yGX;(BC3{@#I$|$G@5v_ZaT$4vvb2Ah+QNv|tj zZ0sHiV{6?{HEj65NsHaGY&n?a98t&vR@ zKHaS};lQM}tJj9#EPEU%k+1&f=^4G)o1xwsC%0D``0)1uhkP-y*#LNyz<3vpAV+6i z?vz!0rQFQk80L~s)M}nls5CUpP$i5Q8ou&RLM#ak%3R-6P(t7kLrAa+{j~kY+>+<< zSn%}tJkx5)UAA{vt3d4x=X^huVJuZmW(mhP!}DWgt48xz`F->PD~aCRe^xe?-1DxH z>mp49c5P+}rp;M_Q2MeG6*qJ6xIBicXkYDKG$}lYoKhAx!TIVO)UO0ZO%6t!Nib32 zzBFoeV-EIu*fdt!53+8{y3wJ{LAd5Vxr?;rQk zGrj}JNwLZG7QzS!2!T)(-a4-9biU;%7&ZJ*uxNQr6f)VEy^%>9jSUMGa*1Ywj4NG9 zVhVW}{#10#mdmNIsISFK=WFtwxSY?vJ89Sbv-~a*urv~P0DEKw8=H#T$udHr;o;%> z03+t7-Hih+a9s^~`0oS5f_!xih4e?jDyMyMXSiuf0g_|3RgEpDJTs2bEy}Bc`d$7A{W+;RQF z^ZMtQBIVm3Ta9mX{u~gogpl2N9j^P8lm*gh9QQa33ISI3j*g&p%7`&e>ozHsa-0n6 zZXmW5fqhPI^|BU}Cbf1d;TzPK>S*fe6 z(QT<|(4b?fldR>f$J!gaZF5sBoV@#>DC@qfmE-}diAtHde4_#0r-EkE3jxz#8=Qr@ zIiGqOFW#9hltHnxu~kIM)3@C04!oP~DEum_G&nUC02tk$7e$@@Go7#qSSeqilzHjZ z1O~j-^D1DfD;VRX5YpG`E$!3ej?)c~wWB2fMsqtd1T@TXkzzPZ)OWp?Ry7o)Y@jLz z)cO8l4)YHtKJhEdbTu^SR5cE3_pZ06XE!zXy~MSGPS$AqZD4Dt6`*U!duOBqwTZk& zz;eFilt0t}ug`KVA!9*N`{SRNG~e7?*%HVI>Z%Djx%2gLVsQA(${XD2EdwKi3Zvd{ 
zjn(#47Jz3@wCP=hr2~)+=zlP-TR)tr&rtV+uq|~ynCIV{DQ8UYl?EWiLcySj4b8En zN7Xa;bbBA4+CrU`#~Q1L3?`FNbgS~;&}4Vdk7sCd(mQ*h&JqN8ZNC<*x>Y*K>9A-E zuNq}~c)T+U`3kRcVRrFwyT`y}cW!dJ`gDACv>@UA@Q_gHk&gNk7dUeIxgkuHK8{DL z7VEiFZFUL>;tyLC1+rkSkKoqfRc~g?h*Zi95MbxsEH+d-us_RtNXsyL&XV zX%fnJl*qRKyfibzUVgWPME%~-&?x8R#G=(y33)*l$t6xodSSBESgY8VkHO8j&3@~$ ztU2HF(k6|xZ zdc_lPw$Kisy#=i;C$}l*JA+`eC78*XYG(#Bplt;qhsW19WMqIJDFT*eK0Tf4ae$e^ zb@IvGlpE4uj}Zc%oEGL)$p>+g<|#W@+Q>nEED z0FK6U)$^lxrYa193M=dB$%#^OOiy?hkd?~SI3E%910rUGpfih^R&J?no9y7=z#WeK zH)Lw7iG6r+2fK7@c+)>Wckd4*pjacOrKS}C=&5+S&(Z}H8kZ2U*LABL$!!ob^%+lk zhH(L4%4Ev3;M%JbVSF&+c6+;Ly~BjW0s!~9r)#{^(geF$kf`?oTO&`2PBv@`v=nfb za<8tM)F!rSE_*udw9~cE0f)-56>n?BO<+@q+4J5fGIO_$b%)}f)zIkxkr~wrbcHfX z4QsprKiTQ*ciS8!K40bAboWDrHbEEHD)X5#F4uR*?}m_Y>4)tl?|kbwone62nTbUmq-H z3x}#TRenu(Gs6+i2+ng}c_&pJxkmk{{r^&|! zBj-&=)ORXn*~3!>a(=b!^fe0`ypQ(xpBU+R%1s63<)bmIWC3bJ3I@BPDb`a^F$n0Z zEaw=}92ooUn>3J5DR$08k#_Ao-VtY-JwA$K=57Y9R!0tv1E*Ri49CC>j8yE~-%-Y= z7oo1da*UG$qk3<60Yoc{_wib*g{{NO$w|3YrfxcHj&6Q(o;4-w6{_CJP(D$H*zpj* z@9PDm5uPqQTfgA)28wO2hun`afi{@9xW&_TH?BAG_G&{!elIS5go#Yua=kbAd;P&{ zoAv3873d@+XSzLv6Z^fE2q?C_-+o;iX$VwEYao#=!Asqvt=QPuct>h5)>&SlQof9Zg_Yah=}rE21rJ7510v$?IMU}xN&=PS zc;`F_45eM^HaAX=MNDi>5DS#oP$Bx~iKaJ9`hWtqz%Gt<45ZXp5@Y_{l`*X6<(x+j zb*eIN9rm1UEx+HV9zkj82*oC+_dquS1sVe`>SrW5Rs#}SYbkURu17oSxQ(CqF+3N6 zwXQBr=X+(X=A#GvJCaT*uXm?fzw_}&c&y?t`XOh>-vmp7=LCVWv_jC;!Ad^P1_7f} zvwCsn#|2B?j}LP5otKkeE8^R|*ro4bF*~8;U!;_xtnC=7`?euV&J(%0aI`)6WkiyN z<8dnsua2G0Hc3f}zI`&A>Wdlr^|=cr=KC#*Rja9x=X>(k92SWmL4s@(O-BtG(a~k{ z215QVLt91NPQF=gD; zIqRo+0+;|qIkx(}mDD}Vm)>(yPgmYY#d<2UnrkWDIuirs%|)qC6B840#}nDY68g2o z9>+Y~59hSE`)4o?Y0Sd-PxC(=HynGIjp}rby8>$`8y(Hn?}wcG?+<3j^mB#5V8c~iDD8i|EjXwiQ&%UD)ORIf*ffSOZ! 
z4#m+#5?T{4Z@ik-AJUDZhd=C(Uzo@YlB=Ndh8LJsbkm1y0Cdkzd3{2HiOCW*Mr>N$ z{@A`EokOd#*w|EyWZ}yIz9cf)pc%@sLBX{v7Mm}mspTM~XH1V}(_yX85$5zae^rFe zkS=~59$KK80*S@N*7s;eir}V_7-PD1_D+ecpwbF|ApSv8zbA~z?h6H8>sQgQufTnB z_^ei%)?DQWd|mek^}M4$g4b&Vjq2;pJ1qFwss715GyO5!pKQO0I2GK$KI;5{+cV;@ z$sNNuabwu*@wM4vd-F!x!Xm6ep+_HHQZ^QWANCyMHZGP{9iLm;P8(Qd3kZW5Pp?HV zL&=1syWadz|LU<|9+mnh#3KPR{a|z(@qzcBUyi*Z)Ge;j3_Vu>UT43^UkrUCV>nGp zNnst&sa1|7<}HIvs|;oL)fiTqdL2o^KxXqxAQf*8|E^gM+@d~!k45Bz{dlmR)8?t6FEJ_)kjQS-wqDAr zs=i{-+ZRV@raJjXCc-wK%L-@gDs$n=Bod-oWkBuu^nl^GJRgv0dCnjGCo+fkIsL>Z zIoZGPb>IS!gxF}n8yM!=CK|C?(RAp!qj9-pAV&Ejzk3ClQVZ0azvi&lW!Y~J%bA!I zgp6Ll59$I8*kw#N`*kMaZpyV7p~>1?8~p3W;Qj@3`tbqHUu^x$guUmCsSaIKMj|_e z-b#6Bl4O#E<1J5e8}i^}AR4jQNZWDtJkR5`J`lXPy4?p;0zD{FVA5$X#Z}dfb)wCCYM(lct{G6*w zmdvP+whPa@au5T@oadZlP&LU|g9$Og4Zw^W`t#Z85i~TOHL7#zRxukI8IAAH$tVa3 zvC*lYV?P&xmtD8%I6OJ}^WKf(%nL0De@m`4kwW7+Ws+(E`h^cBpTQ2f{DMI&!!)zM zDA2$D$yu`1cGucVm1&gOKT&8e)7cvjxC5XsIgUtbRK+K8mC=_mx%@OK!W`$*ifQq% z#)h89=k%JVQ(|=4VKW;m>Y@Wqj)2nIqqQ}DG~Qd}hWFcleu7Q-%@H_yloBimrU0n8 zAou&+tLKybGvu5a*de@2sLB0SRy^8MH{DjPva*uN^H$JFi*z1%!!;Gi4|XFlJ*_({ zIxcI+6-WiqGr9IdFGF^~UA=SlHOo`8PS_CK~g#NBx**njBu|BtUaJ zA{X?oV@BkJqX9Cv2ZimHM&&BQSd7AuqJQ+adJwy2zVCTm>ri@H7A03K(Z z^)nv_0+im@}H5VfjxkZa%>U?vnLxlb;_H3ISKLGy$j3w{Mh_AGn9z4vNK3A8Z}% za{%)1DVw6))xu*7Clw7%wd~dF!!^`2H`&s8N35f+pv$ptB-RXtmJ|3b<5J;)%Z)X# zSi*Hc*YwaHHTM#_5cCLf=K$x)yo#=r#(0LgvaIGl`T8r3MrU2$nFe#>h*i}Tb|RH4 zZKJ@=pP|#Y1s9}2p3wNLrX4e{WB~3#esM6XUkMwhN^bnU1>Bi^vJ0~h%Tr4{0FVir zD?bUDno=9M$t=ic)Vjk8njYNru}+@WB4eOq)_!CwuzNn8VhJEe^WN%&lcOE=6R^yK zW;p^I$6$fw+nt1t4lxfUBGTneS_|oi5Mw@}SnefwIZBL&DDfoWf-F$Y-;evQUnr-L2s~pX#a3^4G6#xRlvORs30#K2Db3+%6bTMu}dQ84OHR9g~sHD3c zotDp3QYAn%AKw#})(JVO#EiOr^gTgs>tgM7zkqyD1ozH<(uA4qB4<9gBzj^LpbV|b zoCzn%C8^bW${B?Xr@js2e=)vpA#qpq%fp*Ox-s$yhL?P?#$+sl>G5*exWMWByQ$eP zv@GP#pOa1b_(zN_CtuoM5TQdx5V4E$(=z2)t9PD_S6`8Ajr+VIi%@_?;&Hp8=)c@$ z^v}TNy}aNQCgi-x1=wR|N;tE-K$nM|p3cu2UOqk_kPY|`AK@}SE|;T=_l9MN(=9j{ z*e{5l7H&u=TqgrK+rSt_nv#3oRgJbM)#rosF?TOS%}fF+duSm)g{?qEdVqEB6a9&V 
zkvnchv+0`($Sl^5x$NgmzU>yXH5ZMNA&MNHw|wZu=?PAq<3hy7;h~{&s9vEho1-_N zT*tBpH}~5;dN75rBi#8saGsekohGPQm))RTG+FcZ;^Fwg?qkfz%?Y`T_rE>SSSwIi zo2*fzxiw$MbNv1fi?bR6FB3(*)((MiTa41b&a;H8kWe#nciNKAz06`W_?AyxvRM+T z?Ot}O*1VTum)8dCrkVu;?`_K&x=}oxS}sLe!HfB%-gJ*(mWS&2faK7UQj>Pf>4zFX zzNF+WbZv`+hZ4ixtR!-4xfxkx?Xr$W2SX8}{)0Pit%unC`f{H;3IOQ%X}lk`-CkTgxMq<^!%-vQm&gOD<^9IP3$)uBZ%a;uKj*_)%r=x75$-9MeiAsrW zRjQ-9t?> z6L0q2`3F1G*mJ?Fx!jcnJViuAY>2wBGaaOh%$-}~ngdEdS;vXRk=bB;&dXz%2?^X0 zni%TRbS9jzhX$PhDS(*Eg469k9g16Up?Ah(>`%MRdcImho;al?)tv1KN?y#U*)TBc z!-J=DwW;ATaU~!m#KuhIT(f*?LE|a(CkhWB+Fnf6T)M1fIkP5HYXXtOV~oCqnJg;x z$8o9@rp#lwVr|IX&?Br5SsAJw0ZQAA~JC!}Vf-(^JFy9^v9Sb=> z(;zd)GCW_Vrlp!}h7iXd!yWv1dN{&1JcVk55t3T?M@3)i?pk+w9AJtMjXuG8k+80h zfC=uTNYpX~--Z)PcIsZn56xsFSg8{!coU!Z&UKSVijkdmsK7x+E8_@2+?TMVJt-6i zp0h2Cw^mkpa8k1}1y95ZjK~trR{=cDs;KD*4o7e|R0AhNdhek$)N&wN5*N`Bk3su*0CuKF3oRw>dqqJq;y~C_Q>T%O8-x zA7B6SdM}j(hP6xsf0*zzf8K`h>Yqo>^yka=6HuNjGvT(e#m_v&SRl`65m!qxrQJ}Z z)Jdn+{VLHXl48Dy1oAvE& z>-z{o6S&X4XehAqHKWX{4#Evt~N3>g}YLQl+cZp->K-LNlFYdFDi;S&7Q7+7eraP}kKoa)xVf7>5U#J#Lf+5Yuq$p6m+vdml*gVIN*EjEK@;S4V zfAtF`3ca~7wieIki6GR z=^nctBzM1lgZFB>$_MAgSj%T8Hh>VRGzv9|Lg{3JJi1((XF=4o-itau`Vw>U2Z66* z!r?=h0MTxcDUm#b5zG+ObO!&ZZ{_-teD4?-CW8|qy$m+dcV3^YO@BLQvbC5WU z^N>AECV#kP_GlpTjTE$}#t|dDgUqm7_^!P_?_20Xz82LpL@{fwV@Qb517E^y$V)J* zWL)(n@Z?NOQ$hA_o;8ZnoJ22e!=lS5-o;2PI&-_Be%5J~o;Pi(ce_G{R8)2M0?52W zBYYZ73vrO3AfRq6KB6in1C&;%^r18R8a${&2sk#BjR(ezKDlYcm4i(@wMi$W(+cpx-k3KAGo?ot!{RKO-ZQU<_n^)k^iy+rIc}#M#eCA)AG>FxmcNUl zq>p_N0=dNgd3Q3CUU%h0wq?0~>>rSogWiQ!45E=T7J|3Xci^0eq~x zDlRCmemf0k+0Kkv{l?z&2fj)3x0!6iqyc^KE6MrK3FX2gRk3LE(XS)h3M4DuLrhD& zPg{4@Pv?WZ8EB`42V%;;SnlmHVwLGmQ(vjas!>%w72jZ+NB=7Ln-q(xOX)|hyp@AZ zBq>OW(3^6k12YPryo;6oRyt36TSp}xRyTc%GGez>OH*F&mV${iX8JrHL3N_kYT&?Q zKbDq}sQoTl3_59NT-WLgdgzuvD`CjJ=$=1R7|bzhcrIWims^+C1`H zwRPLOFm_;WP=9T&VSa_&^KD?RZAG}dKipJs!GXEWg(Lm%D^dImNQO&QMTfzHtRq;Q9r=yK|< zM;>xiaECQ^b84&J4+y^P2`6yEU#r)}nQJ;i$G*K)?jf*Lr?Cd#P)FP?%wG&NEXV+K 
zpoI}lbwIlOT+-8f^P;-AxS42Jp2a&xO!5PNyjh%U#-efE3jC;g={;B@;?-;nEWnf= zw5oI|t3!=EU>1`m{rvyfISFK9p{pL>z=1Lo#=FO*`3@QHM9?7wYXYPX80g6Tzjy+& z<7Cg~xAp&Ek`T``^vv5LPXxkiau=Ii^XIYr`)A-Evm$)Z1a~fCR(~)3dx7}-XVTR1 zs_4vLApGy=Dk6cKU7kcQ{=M|?1puUlAzTWL7U59pf&S`)8%DsSdE&O-NLp|<`3l2( zqNbw`%BeTjJy6XG-&9&6x8i2nH0}$*x^sW3p1+61*YF%DHV}C=Tu1Z0=p3q}!w3Hg zsO~ugs6hBvmDNTUG8-EXXSOIs3XyK-1Va4$j%OP&4F4Gl)e*GSaAL?e|MGGM7+BbG zZ~>LLxb*OZJW#jC!o=5iB%kMkc?@6xWko(g;+R&!Bk+9>=D@b9?5Qpp8+&%K0t(c; zKsIS+wv-p+r1}pXjkBW>aY%SI$9EXpEJF`L}VSm-dqJ@IpBDJ|XmkQYRd;-CY+=mnYAIW$IgXYCD_EBv3Q77Gd4 zZIsR8xXZ-hag9#*==SYp*XBS1xW)NNLdT=>i-6#sg3Gm-DiVy6&>A>YMGcSkZU!u0*Gwz(4zEM2sRBBd-vzvX+W0MKRld0I4HTh*np(<-NlEc zOh=~DwzB6Gg4tX?(u$j^!pV}umBUYB>|eLb9*FPGCZS*cH1q+VOpf7VNE zz5{4Hd(QGWy_D_?G_DR6%eAQhU(8Y7l*c?E8rR3c#l;24Hmm@07WV7rx@uDboZfT< zV!-v_2Q12ZJgUf`Eyzha!%K7DX8gOpWtRa~2`pu7K%DpEP1B2&Viio&{aHD{(Cqc$ z_jPGR_yYdkNWz?~uD~&XtYAhvQ>kF=2HsSm3Wo6mhy(IDJ-STWbK7Ct_>=MC9SGUT>*ilH<6? zat?G)RWMU8!chGbfj30q7>PfUa;OM!6s&DLNog1Njde?;mm1G|QmirW!=gqH0ltA0 zpmYZ1h@i$0BS|u}og-fC&xI~(YDyJHdy;;}gplZzl9om#KRJ8**T~^F{c@GUM{jJ^ z#nnBg&D)U!`?YH|IgR_b61kiG2^qx*On?PcT;3`T(3DD&`jqG6pKBt3n67A;nA^=C zs0k=MQ_Uu06&(jt7ToVoUW>0+IUNw!?Cc#C0AwE8&jeW~m=xnaP;j%jJKpb(c2`s; z;EqO=?&dR+7>RSPmhTvlXzwPoq%>G;y<0O42jYQZ=9oH5ZQ6U=H^ONV*-x!!@wboL z8Rh`N*H~3FMd)nHQ;1HzCKnb3Z}eP*NLf8^#Y;nYy(c_BD$31u_Zx#iI}T*LKYpz6 zs?W%FoALjx(Er{Tn}K|lCN_3e_OTUmW1ws)66eE3-9hKm4KdW9y+OGdvtoTf%s_LVLDUSGX4q%2IlpdDd*@b%jIUVyP~?3YxjrSKu1Yn zb&80nR*XyTMCp#CryOijQvoaoY{+SUe46f!*nbx#0JJmv=)JTVpO}c1l~2eiNRj{w zOK%u(-qpm$#FZWt9XLMj*{qAMv_6=2Bp?+!AGHDySdWnzYXEksERMOGPu1ENn7at$^OvFS*M?+rKFYm*}MknjtN2DoU!WI2cfBTk| z^$PL;R0&RbOZpkjr$=`vd~Ez}<`?dbx6cKyKd0pLo(T=c%Ov9V@P|di6W~;Jhb#?= zhg6M9K(Jyog^d3mr@Oqh-SNMzeZ!0_fx z>g^SARK7=XJnI4@PBbCu=KwwqxH;8E1JV>VqKd@gPkeN!PGgbr9&PPqIKo_Ck>K%={s%U%4UrIU3^S)hmc zsS4Rjz!xP*K)iN=B`$}&F$3;xH`WLE_>Zcft2`ncKrx!KF?JV z7dmI~^tf#er%5mx4Q9^hcq<^tfB2BV7S+L3O`7JsKRb*RPoJBgo+=D)^<~Fu#q$cq za-mUpDrnCO48k>~g~LgF@Aam;s)Et^5bnI~>S<&)egE%E{@=yI1x;{&54CzKUxL67 
z(agn7O+8oP6y6e^<)PG8uS;b8!^A1d1LrDOo4|u`zs}x^nkK=@gN|$R_O2vk@C}@ z$ChM6q*=@)ie=UMK2c-U@*v$DF)W1Lc_YZ-eVO`^Zf~NV!Sm^W6(&UQRKM89uXD>E z-Iu)`O*;`c2J0hi9|P1z6;`kBS=zt;M<;mD%s&yUZOo z4bCVjyDEl;b2VcMqTC!;c5eVDfJ|TpCIIS~$?{_J+F{eh4*$a~+D4+I5W`@SrM<#1 z?T`EIsTrlmjCh8emL})1)a@rh{hr8trM8mH?Sq1WtxpF(ukXFJ*BtXSO_nmeM|JR7 zXXWIp{%k)!F%lBI)adt!<8VO8)KBgcLVu_Taw9C#EM;kEbT9n+4Qv`5`^q~)Z$!8N z?a~(c(f<#0Qcw2G7Y{@x=GSV%MfQIw&!Cuw-^s7EFNRuD%Aa-((1LSAdU|>a%)=^e zUz(J*u&%f!cxhHf=Z&~XaykqwctODx0qXIU)%d+=#6L@L^%4^uW%$6Do(>qC>@WhK z{`!H2R`iSQjjTxE{l#il0< znNlOr=5EYSNx%!3_94V}Y%Xzo7TQhmiHkLMUXaf#$=Dh=**WD!@&I4^(DAfv*pWke zaj=rHxU^(Sm*P%$bEp9VeVDoIiW~->oW|(4t{`w1+GBndz{`$nblBr3YCVSahUUc2?r`NRh%1F{4#J|nmiSnq&XBSD7Q&dD|h zsBZTUinf4?zkH63#*zUF>>F}@GB*crDSMXX)9Te<6^lBrcE^3oqj)>o-XR#KRA;pi z@u0qF0`P5^>&7!!v3G4`E>^zqkfT+`6oGn3PLVy zwZPnr)V=6jDr!o1GIr-^HIhQAJofALD^ugXo%~CoHnwb%IN*6gYxE`SS z@lz&5!AvL-N9{i(RDVA3+h;<6{bKG6uz_MfLJ}>b3usa`!29D*NCaj2bI7sIQ&ATR z_DY|RpN))FSEpxxpJkgWtz4&^U2M_zbfa0LpyW(}JwFs%;;hL5p zWe)d&rt|Z>)3QfaX(ltnX;{-W)ZFZMucv^%&8?rbIBP(-=^vo^UG%0c5E09AhPlsw zN|G;ThDY+IN;pfk`jt^oRP^+9ZFz}*`jcc(xjw57T=m~@14J%>4g8x>&gv}QiZ_6l z5cZ~xV>0-;@cRzIbd!O1A|xYzY*(pmyeN)FF~JNPI?B(_Pil#i5F2#bv47wbM3m)3 z$T@C&wmo*-0R{HET2|UF(`@bhwplgh_30C=RzzD#YrW}$cCGzMrz_5v2nDLUoz(3I zMp^<={uDp|ib+u*yrZ~&aj?)7wCwf3>?rx~q>__Rn1P)4bGM%-6%rC2aaE}h4~fJU zh`rP-xIZlw?uYu-RaFnGBdg2KqOv7_Sg_kL7ZmWXzo&k88hbJtaJD_kBROkwB!ut1F3JUyEU(PEcCXqepaF^|e zy#+ZI$e2=;HT2{9ETAx(@3mA628MiOrcZ;*n(qg+)a!302>9;=NgvyBx52%}7``vg zcNY=>Zi^V;w5#C7{Ww)TvE(dWrMgG1T@6Ql>wwjAIWA$gaxSCU+@S)NA0$UaE|KbR z+{Keo0TdMe_+%*mSXW3t1OLOeG1|c4EsF@xrs6MM&Nv8( zNpHg>r*MX4x(>_HH$y5kzLedhl>BDd;ht}5C9(n>U z8@_`XqctiP`uOY|>B`7xM(&eITF3i(C7LOmM^1E`dFLI`aMe9buhX|nw%chvZU+C5 zd&McAk&Er`V&%{9D$KtW9__J=dOAyiq@M1FI$-OH3(N<+ zZ{nPe=EVRH5cy8zdz-CRYk0wj2iDL+k&)#MtwIdXt93i^ji1)T*fe(jO~`ft>}J_V z{LcykP@bQB|DJtcUf`Vl5E)@Yd8B#Y`|+V2{D5_rn9A)QQD?cA5q|a83t%C6J2id; z53;7P)|xJZZkxoU`Mog<63o$9A7-!Xm|RyQH;_t=9})0<-X+A`>AcDQ5X5*ea@1Cu z{ls-eYq``MjK^dq1O)Gylve)hLDyRZacO!ldx0qk 
zz#{DT3$k0_+4M6r)7hkwi9GK49f|iWgU-9YcA2`Mbuu7k`}}2U;2I!J7O9Q}5X|QG zEiing|3qAy+*TliN3dLeX7FnGXeJiw-VqZ5@($2Jhqmab4o-Y~&@Izux%NwJ8qV;# z4FFah9~cZH^;CSZQFyS7J&m)eMrBsGPI>RpygP>&V_ON;MpMidJiZ&!;tBa2(Q8>>_6%P7cVwtVu$eDLPjX}|KHcfB~v^QynR7Ci#y5YSbo z(lpws?liYD*PqEhX0)7riOimVy!JWQCVk6?NSY$xL_yn+A91QI=Jxx!J$19SU4-7Fc#4^_=A`CCx22aOyK4=*-sa#pXUvu|btOv1l1 zNIo?(5=3dWre7bmfn_Qlz}Ikk8Jm%>;a{Z)tQ%;{9JDuofA8*cm7{~tS(aG$_E0;XZw;cz%%8l#gUS8!v>5Ff%FActI^?`w#ynj zaQ0zLm1zY5F9x71bKdA+664N#Bu}R8db-ks*V$0L>EnGsY;3HF69GVb zNB^tUm7RwF8mj)EIhR(3%{O>3y-~O83~c1tn%;9R${i?9U3SNUK1Q>cFAzM!`V65Wfk|?1~wV~ zRT1+gk&w`v8UFe>K=QogEg@yij_vGh>l;mVbabkmZ8A$W>Se2qhQKGi8JQ&|w8?BI zjv)~|){fjJ3g#=jF(p(xV6YTVtCwE;1dro5r26Nf^}k5?yE z0k1AW1sCR?_0`o)n~Jz9tfyzi1uI8;d*waBmCDHQ2F00vkBN0YAFZsWS5oF(uN@p6 zRW6#E++Q6kYmViS_rOS3ok{?$4FKl;ME`g_j=hsDxy}k=JrNa%JAS_R!e)(Uxm<5x zMaed4Y51t}yJf#CNQ94;4b-cy{wF>Xl?ow4%Ls{$OBa|a+teWN4F8#Y-bYCc_{YG} z#nY9%py(VfOOH){rJ?#w@5tL_a>DHNX>8Kl-id8so!Uu|+(oZ=nlMgcrGb&sRp98; zdb)esa>l88mKgrLKMk0L&ij$ zr*9&6SBLM_dd7#Sw$h&MFM#ikXyyJ>HTmvBLpnN}=ohKJUjBJYJV zujXHhL=a^D_~BxP-o5Y%F@lgmT)m!OAF*6pK@Amke?`oA&MZ3^v~HtYIFpH>gn|kx zMzF1MUB{Ddj4Dc#Ugl|?ED?4;J-Gy(ZA(u)dU8^6YxQC})|*WE6hQ|S+M^N83MROA zBJ;PglR?R(*YE8KgefdsYBfmnPW1x*hPD`RY1jffJ3X+pdKY+1^6QOW2OI~>4N1ec zcX`l`+EQQh1>vrC(1HSmWy94S!R3ZX)?lQb#cV_OqIN3-uqkzo=fbB(hcP!5Ax|x8Si8Cbm>DV#xMOzp00W#&!F+TA#-XT`oL^F=KSVnuKt0c zta-;AtTXPWdig4TBYxGuT@mK25Ns&$d7*x+ap$jB1oGjfA}HT=S{=4FKFs&cT;kEl zeV^fQJQVaURn4q-=`h;_Z%XaWJeD>Zg1u_^kKG#r1CH zy$S1ArP>$9J|F5#X9Bz<5eCn;9*k6tg$xLb6zU;nG}5JyFW6l^*FRQ(?&cR;)Ux!* zr$#(2q|C>PxV66Wh;Y_fl1Q}KGDYxAm9Bi1kB*tus}29Pj#ST-vX?U*l(xVsA7f<6 zzBT%7lvvdag*^L{WszDnMAPH^SlK}eRe|{X6`hm8by=U4oZ1U)r;$u4d+#9RySED< zK|Vbj@u2D&;3(#MP5#1M+{_MON!fXz$S7Kn7vNiF($%O%%co<*QxPZHtYNF=(32i^ z$=;rON4XtO+!vqN60pYdcM*HZOA*z6+=?K}(;r-CuxS=0Pp2Zz%IxnrKd&y|)5kz> zJDy}LFAR{0Bv{VZGYjNlQD2R2Y2tLAm|?iKoM0^{r~wJ0-zFyEybx{9XI& zue~?l+vKgXRn-_viuw2-$LpWoD+_Q`gwCkdn(UF~^vIRYUxbGi9s%I*WSw9=Wbb+wy4dfEOGdHVicF>`X@cXSZIs1-a0zY!xZEIB9gdoz+{(!=f|P 
znNYCYxoyfln<-3|QaMEK5$g2^0?dgTk=lNy`!NhvRjWDLpCN6HuIlFZi?+7RI8Z^_ zEuV6`?^jcuN+ekd_&PvRO)_1{7b)S>RN|Dp?c6 zd(UQQABG>>KYh?R-rj*$+$9#QGUfkTnc-`G5blK@jD*50-BS5)Rt}ILW&sHox}fDis(WTPVR-udOBv7oxmz$$)M@A@c7yoxnr3 zo|fmrgG)2bYGG?>iVJ`5GGv&YQsrm#a-i5ET#d>N6A&nUq3Ut4Cl#-GC5X5FA zJ_wL;zho~Y+(pI5mk8L9cheU|rLw5MdJWds(I|E^T)NCw_wtc;>l~W( zx6>VN&p)U)ALA+!|3Xj4WA#a{wySWuuqzrwL^A;YT2XYfP&GMzt0a*?NF*1tW$V$` zhbf}e0MK+QX|&ep-9*?g4TkzbMf(t5yLW@vPwSKewFuj<5pM~HWhUD0&$arESgA!J zd(#{a=My3bmaSD4Es1CAW3=jEsX#FCusiPb(6i8m zHsdE)tWaylY%()7R3FQ*iz-4%2|{0`X7epvr~#Lx|4J~}p9}y@7n3Mw6PYf-S1;&p zsy{DcQ1pAM^84Rk4qQ1TA7yxzAWxNH3IoorZ@$Cxuen8G4-X_Tj1DsJ*0?2>rIP6O z)SzK;s!mbd-+31GU;VhL&Ug8>5)=0gv*z<7tA06P*@MWSW}+@$p*Tb9p5ZA5^^1chJLh(zS|+s7?7B>4zg; zR&rcHZP`BAmT8Xbint1mA!m1ZMVSaHNeP*(i`~)p(^(JLx-C$6eANkTHk#SaYjp;u zc~Wc!`>nM*E8hegUB}DZr3yf+99-)g8@;nfUM6JzJ-Z+^ih|YD3fA`~bXN*$=`>n3 z@r4!ErzEz)&`Tw4Pc>5TNg~_ifTGV%Yak?Ek=T!z3*}#};vfU|P zsEVkBCPPXxwsh@WiGz=htFB`$22ebT)gWx~=yf@yRH%dU2dwqC3#$!MN8%8y1blf0 z2306&qEOUDw6%3&dr(Rxk^of0St}uEEhmS3@}LcKa?Bt*&i-Y4SEnBoecfZ2RDn^!XUt?>qxsj=pQt4ki{z$Wij6{&V#GEO- zx&F<$O|!AAq=CId(g9qgD&BjO61?tX{Tp+z?P4Z9W+k`?5MLIOabg&oEf(*n4+S=> zSwZ2A36_Wlv>2D&(dSOg`mPTYdt>*81$-7KKeL|km@KtwrP97qa*&NGoBv6N5=of6R0G9MQj=E4;rb-*c9sq&_6 z&D-VDr-n*3*w0S8)T?zUt{?1tD!^4zNL1%Snk7M&lXGSmnpXRv|MA0s(QMf$pPN_% zDXVcEn(@>^qX};bV17i6JcNAz-8ESiz`tVDLj6j9Xaj>wXf<4E_bUtpI}D@Y*I~oF z9P*_j@@A)1Sg^Z&;CTG>Ht!DtS0N8b?Ft&+18^26_u(Ad}!FNGfU_<*m zGB5J#t}?Z6o-T+u$6DhFdArTV{go$mQ1t z0*NyP4B&$`r-|SL%x5k72c_VEVVNnUBohApcS*Ld!Iv0Fdp6ywNYd&x6xE3t^%bmO z&v)?=B>t+3hQxGwTzace=+s?P3yCQ<3+0k6YeJFlQdWre;m+->K61p`sdIHTi)RO(3D|yD{j69zR9FspJ_*Aabqf#L8iLBc zvq#`@<`+v$lbGAtdO^_LKT&jb-5=+R(ovqAlO^$51bP$D@&`zK7HvFWjwP>m_#pWN zCBqr@T)$55gjZc`M5FfDF;liX-ThwfMM@JT{hQ{28<-f(fuGHpwqE(1%nNDbv9TILAlY^iQxAS zI9T&eRhgK0GB5D*5({H14455j%2=0|msbH5Tl7RBWSgcF9bvl00i?K+4asVw-gMjX z#=-q=Rx+3~uGFS06ZR1jTv)&+_tYf+}M%v zeoM5qjU`@+&$qRAG#JC{;a#br+fp9x#SzFv8R}q5e^3sx_YZxm#$qfiB_T`u+pkDn 
zxhr?6&z}04EXaS$<9CdqH1U$#r`{kjzF|Agl#VdS=a-!bqYYL5!U}sGL0qs2nz{fX97wxyLX_Yk+ctn-T7cDn0RljGeg@A2wvZn)w}1-zDoA*8&VgGTly`5!Ydg zZ7OqdcClIiR*e-?I-^LHE)k)loTt{sYW0_D^~|>;jeh$8V}usYUdR*XermNuLXPLH z;I63>u@^9rkNDa&U0CFI0K|s+>ry1A7LA1(gGmC&FGS7q$OHq~U=*#*lrznwzg&U3 zgeA5s6?YM=0VRA3aaOsuD$^79?PDFygNB{iaxG1h%>L3E%II4xY8m}^-)f+IZ+wek zKK*?AlAxDdY|}=j#g~GMXU5uRPG<^WC#>BQ>`<|o1^U+`i0Iy?}Q8yNQ=Ls z=E8c5jadHtvRXmWXZcf?S)7LZ%bxhb@(qyulE5b?)FC%j929HDpB!#1B?A#bv#t24 zdP+>{s=cw%YKRMzQ~SAy9v^|HJ6NvzS7`hQJD>G_TE(Rc<+sf)_X_)vHa%jHPZ$?q-l1cj7`#c5lw&B-&cB=%CQnOo@B*4nTM zfD(^jXOutuLSv?+NG!Lo!{N~7%|hbk_RboS?2btYXd9hrLZj1SlH;AnKX0e}Y+8ot zztmz8y8aQ)lH@mbNAGp<8YvmsSeZksidiT;`UXQH!tk>J7vg&k!G>0YxANWC9b`i( zM~gjg*Ku86u=?`Q$vs@GRHz*E9&VJk8~xun8_|TFH6*i@=2{#%PiC2j)i@*25;CgM zj~Bmq4KPx2Sp=VPF5JY?U`|dp_fsjdZMuJ2aM~-?)%@VuSwa(cx}x(fEi?F@=yh2PSn>?ilLR3X8h0%zqsy{l;GZG?V^1%cvmjoK8r8MBLg&4%8T{w zjRqkRlmz%E@*cxmyIYqEO=vN3} zngPCjGbzwTQb;s@w=xTY?R{ebN6w4wF&2R8hj9JJ4kcUK0}Vw#w&mNjk6}fOr8t|8 zpWzD{yg#HH-7YUZnaex`I+-ozGy9(!DLpow3B7TtB^Mm86x9d$4`+NV-2sl?%Inda zxm+v{Yv99$O1w{wd_hpR%C&KEZ#ycy87=PDK~!G8M%tJ*qWwKy)>`Gl5!$nT3HnlOiZz=_naq<|4*H7%}NnFEQd~_`h1OCmEOI|wK^uPlIT!G4;cJ9^ZIOq6X_p%S( zwl~($sE;?uhI5<@4=rkeXxLrSKt}N=ACzX1o#HQT1{L{-?(Kj+_&pxs=D%USC+1+b zHb6rxhB@;oYeIc>I<-ZLt1c`i-tmy{t7y1e8eN+;dY85P%fq#UERh02!0GTyv<4+bHRtpz zw2g+*+dg|vlJU{1&*nY-Y5q}SJm(NSy}Iklg>8p7MnX8DwP+vYQOXt<&v29N^E z+o4<7DE}Wl&iZT!%g-HdYaNV^Q4|O8-pL>WLQK`eDNy8~k4@s;_HBhfWHI&v>^zS` z5C)iGU&Eb{V9{Y47AQ=0W@^_-wHXT0`-nler>%a(=BMzOVmycy80A3opcR^lxFV%E>d z#}=qmij=|savQ!lq6Y6_uud!fluvGc#v5MJ>W709c=ev3%cc0b$!SRQDRa|O28ZpD zk;~&cR#KyopAhF?`}haen-L2eTPNZnz8TDOqQzc9C;zm-O7rDl3wH3Su-%vYb{lBX z7BDfq$<-i?5*?GbHl@vTD|riyU$(Py>2#s6IHm3m5f}T|iY0*_teaI(Y6`dy1w(0` z7SCvX%Gu4t;GA!q#}7=lSew=_|HR+86y^N}Rlo4>>b)YQ{&v3PsUFHw6*S*i+lm*W7$MV&loT znb?u6unRuvIKYBX%O-?cqy=9cYXeuy28>#T%3MkDRdWXTVEl8}v6OFW}jy7sW>c3>J;*@)6+S?Pewq%kLmw72G zJ80INkAm$&K&cSPG1$aS5hcYS=&S5(h%1yq7wh`^s&LG=AD8gyq_hrl{`bDN2ejli zI-iQrY1R-Y*XMWG{{TGAV8ViO>_*NYFHDPq`Id6&btZcJz9eiMOmldJ!E6YhUtClR 
zfvy{j&$Q3i;xZR2ox0^ixG=jMNQf;I!jqh)=}F(HSGiLGQWnX?Hb=~~T648!>HLsE z-xNgMp;UJ7e>VyH^ zDpHO3M`!0Rz^5-N#aCesIezvu_nSj0fR;>=tZqAY*!H3+>~)W5WU`IT?x;=%^JTV` zCeLR@s+Z=_z24(TWLcYcxyDgM+|Qo{FS^(`8qJbder{FDq8~S}LzsYj2d7r~EK|up z{It!@<#7+8vO{ZIV?Mj9JpJ+>leua6>!wvyqW;WF?!FRV7w>UE!s_Rqyd+6zEA5Vm zV;tq$#*vgUgKv+iOy(QkT!l#~YGeg`-n1xKK$WSiX0aCBf4wSz0s*0X zX*0cZt4Yxex&}!!dCMJ?PJ`!wGaM3=Bhkf&r9$_s55F)|S!!yBqxpAMn9@EWhtZbX zY_Eoz;ZcxMS^+?xdQFNvr^OM-B>F*h1i5d-o1JH3I-DlV+EexpmHTh{2uhgk{h4|C zT)C+WKj6*PNtG?FaTwar>u`;#rtFYo;8MoX%!e??>YT9IDT1p24!c*D3+sm zIIMSSEVOtOHR>)Taf_Hq4gLjH{1Xvg;m#Ww zV+#ic<~A+`#jaK{n+WHNHMQ1K1z{*QD&%)mTXr3p{ZLj2Kp&Dbo?aJ2#Qi3X*FBNf z3dpV?$lk~E$-SU7dY+WepObmX-{3Tjw6Si?vR)-Ojg(f{M)v%ZTuHj8DE>pU@U4_4 z{FuYmqr)LtP*A|Ku7~C}eh+`S$k0X!+4q^=Jg_}nX96K|*LrsHFJJ2apslYXF_rm zd~MGS#{ncem|08?9$0M0-TcJ6`vHtt}_!Xrd8HP~wwvE6?9EaU9Bi?FZjy`n3 zt6ehi-_hi+`)k*cBqZb1AY&E0ncz%buxb01>E__`4t_+(y(LHMfA3TMQ-=Na;N{2^ zGRL?8t4gDCWxmEZ128oFKM&313$%@uE*l&!pT*MU4GpvE^D%yAr2OBX7kYEJ4+!F4FN7hQxTJIzT{2sUpOU4Yeb(Pl-+#qN7reI`(7UBg zq!UE{5iNHp$&ata{z{l-jbQ-d)VC?*YibKPP!gUE{Wv;KbxMzjD2#ODY#LEo|8p?i=8M&NQvhhja-7Bt(lkQOSCU^6+s*V*;4 zc|5#QJWX$avY0KALJoHo#<<(sMBX#+oYo3qw;7NA?EM06pmJo5x2UA5x%|8QJiC+`=PCZ$7mzsc{EFY`Wd4LZ1XvEPhUg%m3FCw1G9TZAhf4wh zA%gd!qmdp-fM&m#N)ZIw_4e-WsuP^7zJAmJ4%A17`vh`~l&x6f!C${JzTLX2l9#9v z4$aT+AFebQ1yOQ9o}k|Sd2i~OUtufEOCS|s-vHe_bxR&EYN~BAX5QdRCdGYC?=%VG zySrI+v;ORqVg#q==F}ENN1kRHN)ZaOvk)0Npb6wSO~dZQ7MM1{oJRD{Sn_Ev;Rn311myF!uq4~)>F-e;JidWZbGrt-b+HHuy3KWWU*RB1dz+! 
zD+zv?kb5>VGR(KQIdOctztSqSx3dFP$ni*oC$nW6JKx=}KiHU!mZsj@bF7pKK?7o7 ztSV*r61$_RAdu8c4lunfV!B2`VzKI#u6dfiGy_yVEzDRcG;o{fl7Ih`a|MHSdJt1U&c@o|b|abA9L4DKFgU9C^b@#XgSCz&^w zmX=nZpSFEDNlBN9z{3D!Y$q2_TgC8CP8<)mB<^qh3j*Tl%=tmD@)W@Pu1*fb9}`hU z!Pga$Cx!qPJK8@69#rn3;jDUZH4WPXXjY^xFE9-Z59#$B=aBcTsHW!1%F4}Np>^EI zMR)}6JhCKb96eV2cDZ758sS!21$`i+*e2rrGlKE1T*eo(5HcJ#@a#_Cg z%ex!?qU3VE{O;|ez(%c{FRqz0{>$A8-)9A3A=DZLqPP=pGTgH|7xsIMWxC&O9u*S2=aUu|v5 z*sn36IGhz)s^<2^lBDuqIt=jz?7@IX9aEKnL;sPTb)8H!;}$WK|pvf^qSUi^Yc)b zfL~h0q)U_=ihq=l!emgKhK7PvXt3XgV`2TWC-1)Z<42($nqIm9z>TM|)&2(>KY(iM zo46WGX7txN&2y&idiw_xIMYwIdMFrqjHH~E`uQKwQz zDmcE-uv0XSC$@{FtcN=wobEKQGFbP%hF@ZrZ z>-Tqdsnh%;8XDl!;MJiUl?#&~;WSy0l<_35gIlEhas$}CYCr+k|i-){+-gqzyo znl+Ll}IB#{$Fa(o~~rqL}3Fz&ij zdCAGPxtjLa_vDNMa0m5s@8Jrye`5Z62-jJRU$GyQs6|<(@_ksSIlMzhkq^%GynTmA z!1R5VzLIbIGWDRY`cG%3UJX{cKr@cbW=R0(7Jm!UcfGB)ImK77J(-55egdF|YbO+I zYkPv_YsNW0M{L>rnFEOGm(>B)r~!?hcuKWAKCgR4S68P^V#n=44EO8#Je&E*+GZN% zb$w5)xUXyRU?NNJFfmJ`-w+yoSekz2rP_`jj1^QB!lK;@%W~EfKiWJ^o$$-A!S7sVEArA71zS+oTPRz-!$xxlRW#JCS z4oT-f-&o0SZ)OU8uX4+k$6P1)jm1B))37Y1#068NPf;;2n~r1vu)&py^ah9B)k5_6BiLP11= zRe+0CO$PxW<+OszLL#IuSB>G0`m20^=><_klgAj)fO)sH|7)O=KVjLvUTs#k?uce2 zVrhvd0(wFCVs~Ww#WrZXe>`%$(dRjnx5)(~kBjqf3^^@|K<+R?kOyM#OIzL zq~MtC0|tK%j~$FYqg*p>L+*NH)tSXm{IKKo(TA(f+A}*(c8XV+qBf^Rgq3z%L>kTw zDc(|>-W9m@o2dGF_4`vXf{8o=A7dHTd#1@eFIk#+IX<-=6qih}A1cRR;pkp~`?8gd99siliW-yWoiTGv6&DElV+$eh zfdqvBOYR45^3SAbL8|5xO;n093Axb>`{%cx>Pjb6d)NAHUm=$%0_tBVd;0o{z&LUH zSE|G!JK-;WY}2SlWoM(sZ4RX7`M~J#xy)JKMf2r{&exBe=yaD^3=SVQ`l9+C|K=LD zT4A8wG3z_~H~^afhqvJe=`)t`=}87Zo##%c<|Uk+Jc)hC0VZG{vC zXnvNO_DllsqA?=p^pzSfZvLgJ-P3F-3K^DjrP%_d1aK9+5NVWM=XKAS85m66B2Cu0 zq%1|zKbedYSj0#v87jEalC0rd`O8dF$TPO@T@5=mpt-H$^fm zQ3Ym3_%Z3^&9W-`FN4w_6EfMZ^yy1*l%?OU@$hd0W$24`x9ik(kD6d_VGwu z=aP$F50}|wL-CY8PEy<3^+4KjL+}<_%v4fpMx*tld+*%U3KzO8TJQEkHcVvUFA)hK z6?yOL<#C| zPYjgKyxY6W4d@;J1v$oqAw&YS=IT~bhM#XdVkvF`qlt`p+}nZyNL@m!!L0$nQB4m; zF~E8|E}z@!pflop_RBE8MYZAO+V7yfHDB-p&-#?Bqv_<<9cjTN0a&y7N{qWRV!I$D 
z{2UA)I<=0n1-oOk=Xnybh+Lo@IF>7eGkwwR^kF-7dUZLY2t1}WLeCwute-G>6C>F>p#rteHB0soHN_IARWp}xN4 zg=*^({-qoVC3n-ormgL55wS>oP+E(}Bs@cNK7Xmz+~VT6MVVZVMX$IKdvX-c~YNxh_D+TcJ z7=Dw)Dx8Mi-kDa@+s#W5CXn`$gUs4cS5`*1+Kw&qd8`n}2q4ZT;}f8sS!sEw6toB6 zpATYqK|c?R%5sZXDWiUxv6gM=LN>Xp4M*%G-XRG0p8pD?+H9w`wsjKe-LDaR|6 z+%VGJEo@ci`HG+qgjur*OvP&|IO{^SfK=TFWD1lXuJQT3F9iK4MIw9bD zkCz%!4`xclh=?-LyJs>&Qiw6)aby(dR!8L<#;=E77uzJc{mR14oGvMgl4G-%SISlf zVDj3fB$5TZthh~3%K8L9#ZbsmqskZi`}2MOQ>_EFHKhfFS?Swh+#gBGA_O{&eKWNL+B_%1|-ouyY3xIJ9e45OKED>&UW8KHn z#g=osyJPu$sh-4mfnz?GKTdaS!R<$_miv>rExWHb%w(JVJH2kRxM}!(xD%W3pQ66^ z4^@HJRFRqDm8)=E_oBoQE5zm1{bE42!}#c8Iv87UNyVV6;g7}WU~5bIn)1iQ)cs0t zi^5t6V^*s%|8I`MT(=+i_aJ2atd*AgF*aG!H#D%(u!NlLfEnWmESToSWPcsTVUdJd z!RPQ~Mh^A61|}NJ+^wVesssM##;b#hE0a2Ezu-3;ZU8{0pf&IJ$xDH2N@!uIwzQ7vZBSh+!09NBKm4S5x-DGwgM zcUWhq;Iy~8{qMS8(GMRML<#;{EFc|klK|_+gY&dPbZ7n@iy{jf``gO%q9;&>0p0=BAvXxf|HY5sRs^&D%5L< zH8?UdJBii8UOT+hQ_&Dr%a8g;Q*x7}MYxkaF=8y^G0)~GLalyBONs9C{r!7_O{Q1V z0N;mXfzP6FP5X_M>@}UDaAc4t&90w5|4NiK9Z6Ba#8J1_@9>?*ei%*XFMEzR-%V7q zT}ulDbjkmAq5|IT@UO$@DPdsYd_Xm%~< zu$Q2>qi9+yc)W{X8&3+96rYFFL`L~vz*?BiLO>gUsXXx1QExU7&R%IJ?v8ekH{=Oe zl>R+X@yTb+&hJrE3FqHDi*|Mzg=ff=cv%c4(`+6bjQk{_In<*sR;EumdSf|4y&~OxTEENNfN1pFDavp7RnNQxgfhMQ;nZ5S-D;2`yz?+{_DIrlKAQ#;G-K4~^aBk| zT9(+2O{cbLz;o?jK09bWjb5)pP-bHAEb`YR|2O({Zp&Iv34$pNw$h7LR9(YRqpjb7 zAq@>IE!iAW*<`cwJtRBhD|-dq<&%v`?dN8Uxket>-zX;Y&W?_x!|sSu9I=EBimlCf zZfg001;NF%+e_8*$=rQQ6__OpN;hfX^Vm5>Lp{@RRYJX!8v$+h(plCrAC-3bISh;4 z%P)uIRuj3jbG(3N&lJErSZH+uuNtGMK^ z&w&kTIa$A}OkTpv_<5M4V^9Ikl()(3Z<1O_`)p?+deDSEEF0>3^#JPfjYgkSB$Xn7 zp3-uDkFNRHr~TB5YAIWY3T6@_pf7I4<@5!o%t}MoO_S&s2zPY0Vg@v9Xo~=As3zG3f{3jP>Q_Iz z@JZpOMh_*;kSnva%DY2tIXkOdF$$0BYM58vRt41R(E$-g&0h$z4ae*6T9is zJ8j3g1Wd!J7pczoRzAcXg0m-2oY(eEGUzFbCmD#;loH-`i@aTjr@hEDfMT}@GVeKr z*YyCmO{ps#SkuoC4T|wq*P+HX)Qzn(?lUvo&=w^HctS`4rhBA5zH?O#v-fJFg zn+KCa+8bpPPpR?@? 
zeTey#HfXC4^^Ma3YfhUQ?S}Fb(Z@_GP>$6@sbd`7br90YI6DD0<|kJ3X;51{o$`ZV zLur2CFy~?}w~j%8`9<&afkAA#p2)#cO(gYX)2O^GwnGG4i5{rc!`)q}1xhC0+?H~d zpl`Q0lSQ|@;bZXkVj-skHhH;QSzLZ;Xyhg7kNysxo=^E^7tZEhRZku+6Z=Yiv5+Wz zQDeD`O_%{HWtp@}eG#}@5Ug5u;th2M6K#>{6I~m2MEAJIbG5$dlnZt**b1&!NtT?= zfa#8PuEVZukppcD>z6s{QREYce4pLx@TnXPtr`gM1+Kn$d?mkDL87n-KA|9_y+keS zrUG~AW2xPqV)kM!iDAuZBlgpSYMaOrCWX10uMgVk-d$g=3i;tFg{&5w45ak^4hT4= zZN?G-{GM^$H4B%$F`$Vflx~}@ye3^xq(-k2Ki;{(HtlmnbEMnFc&s~Oq&5z4UZ%W4 z4-tyuSdBj6B~6+3kQdrHoL`+~e-aX%IMLCHovspF&8)V%&xbZse8 zU#PdnxVux^vRF}E_Te}^2f=IB)H&(x)BHdrvQ0-T*FYHY8j%V7lL3{7yeXzC7^v9V z*&ixMb~?jQO;zs}{EJXxCgrPwe-`WP-1s8+JGRkiSGW1A13Z`0IVFY70O666B$5k6 zFzSNyo@$1aJj5EHur7T5Lt%4j>-u-*Q;I|>HX^}UJr`8y!_UdS>FW>9!A*|q%?p8{ z=ZSrKYc&JZfU2?>WxoAK-lwC(`y8gis1B{vMntpU;3TjXc_L!zxe+?!h&F3E6l z@$y%{2GyVLA7!OTb_Mc_)|j)85fHFAlGc1#_61r)3S}cY59g$9EnFujClZKmM^8{Z z`Q)7dEUWOz>jBwlmWwZ_%Fd9NG^|4P`2HTKRo|ZrJtex{Y~N2#M9*_2NC8!jDO2x2 zgboY7v2&|8#@?QR4ikZt5&JB%Qr}?JB4_kfyA!J6iDo+pCBz6k?Z{%I+gcOQNjE%& zNxec|(yZARPM}X!3pxKC)LS^vs=D&K%5GA13gn@uidjpw-K)JAxNEk!bHWYOZ7Oo&Uz1P=!VRO85vA5Vzc9|OUXRqt;LK{Y* zsk;DWSir=C3T&Rotkz}C6}K!THXgSRZ7Mo^$YJfud8viEJb=<)k9`x`icz;d6d(vj zQ`F#7s|s6y&ojK-EbqF92GtUz1M=8g9BFU2^oRs_YeaDcjl;Pvht(QIk{I+tatYef zMR#q~*+(vfPaCi}!E>uZ72T04xATx`;DeBlLDkBsL$-Y*y!)5?M!4{oq2}hh5i*xk z^{`VZ7!>M>VPq+E3>-cexvU3c04>CAVlRUsFz@27G4@zHUC=x`%0R<$gi%T6o3?j;Ss*{sH>J{e48v521cJ-tbl7AonwIgeKM{lpKP zke~&#Z5%@}wK7dfeQ-NxJB1hnAsVFEeRzJP(=~kaC4lgh-z}w z!x?K>hfbl%Q<)UZAntt|Ldt zOO`h1p|+I+{x&uZcXU8ZQuLjjJyaxu-9K6Y&Lr#%K7+#}q}2xSzM)f-qb;R{*H>3+ zC#n~_&t+eS#s^~9mm9(!qSDetDdq+h>%2)oo>Rs{;yg6)NsBTY(-1p9WNPmAYtnrn zO~O06N8;$P7IG%*WqIye?eB=2zB!E%4C-&w6B3HQEZc5=q`~;ZOK1gr2{LG9lEA*& zYos7R$&IlF^SOjH+Sx=}ahs>^X zl;=h?=nHL-S?DQ3M>BIp zW&0IhtkAD6&JjZiM*L}Tw+6T2576Lr!J72$f1e^LPTa&5Dz5;o18KtO1Q$*_ZPV@9 zz<2HOUJAA>*ymIbK;2c&=sT68Zf|!%ET?MEG^s>MLRTR^KaJ27R1XQv+)77d(B5lr z-&;HVV5IfoUUXe(weKM)gj4^xVOX|D@%7$)-efrCRl8P$L0_k!?+kMR0&ADB6@s2K zPb2tygS~_XN5)YinmT&aI#Z;Qodp;?Jb8KU7HQv-z9LdUzDL=ot#-`!>e|)P`ghHQ 
zHJLSU7PoH+Abz^x8`n`u)5}$CQ}8{*rE=IM7K%+xu=l_|z`@+_u5IOz?*~Goh;6o? z=!mG;N?)Je53_FSfAVvExK%VB=5pK`#RjpBT zTL)&T3Bvv6Sd1o~US)_zAAN;yMH)R6uKey{dH;VEHR)@mWO&*UtiF} z@i)U^wDor62!E$@03zxd8y2#BC!CIqn^~$12E7gsV*3W?qc#-Nf&@mNN@9JPyLjf+ALyPHp z{SKJj_rZ~?>HV`(rfxzAKRA6o6m=qM)N74fb?yAJz714n5}TApWZ??6?L433b%a~2 ztp&xQ7HV)=n;z{6ak42B?Z8z6fbIwybFq>|$6TKoTBV2L22786L+~Ln`*W2>+h-QN zrlNzTAzYWqMj5P{k`_0&2b2v*{me<@{q&35`=0U1(dOLPsgmT`LZO-Jl4)N#i&NGv zhp@Vv%4AJB)&B54Lv7gyYu8othmZ%2g&P&S2dZ6I-wEQc(tU{7tP?aGay(ZET1}%r z=sT;b-HDEY>=TGeUlqksH5esiLX6M^&V&|07>;l_0*NY;}a9M?RZ&T`F(DU5(kE#z5ymt%xOOs zOlwCBXgzNESSZR}@2eI;c?pq?LBHZ^b`n7kh556SNo=4nl=@`Y6u=L)xuy9^r&uQs zsYIhik(R@q__cKlw2n-`%C|_hIkPb#YF%tMgdrjSA_G z#PptV--IX{g*gzIwhHc4w#740+rkuN5qP@NvaWAI${)=vl_Pu!(C1aZx=CmGit#Ns zh5YWphEK)XvY$RsL>#*6JbHL=5Fn_-K{^$xp!?_&i1mAH9@AEQm%>J?C#q^5FeXXN!Zu>WQ z5dh8apJ@54UR!RZ4huN$_6)FTRuI?Ll^dGBP({czwf}t4b$3G5!tqhV{8&Yy&hcfx zm3ri=dMpPQ6U>G6ZGDwtrX0)$$ubWQ$7fsOH~{88!F%0M)WnbqWMgZx?_l5&w-fzc zr+L!yuaCyNEY;%6)HBmMH<2U$bI#)M;){dgxm#8Mi$c_`tl zS8zL5st>z#R3&!Y4+`&tp$SQql+zwN_p67Hek`_o@>2n)7gD#a$Op5)-tg9!D9@!U z@#awx`z3>)H`E%VI5)S>6|^cUA+AswVCONCLWL9)m(@#QcBQ^p%X*WfYHE-!F0eil zp9gvI8A~;%-Md_@@v>8!5+1$wT*}}^MM77Orir)*038#(PGtNZQ#pNQ(7kv!rF2Pt z{q#=bj`Y5L)~Z-r`awe+{uhc3m@vz~uSQUWM0)S(OGc4S(cWvA=sR?qc;-T9Qg!BS z(b_dE8;Vzv+pJ2S%$%`|2;8KUWJAaMPMiV4IwesriyNg{nydq3ge}_6<(*1>2 zA^a~c4WJ2b;s1e<{KtjXT8RkZM!X!TObGw?O5cGM{|D3d*XQANc@TlioFq$8f&Y6V z@z+ncx3ZwTqBy1tRrZg5G6RCQn%FXu|AU|g>YhG`$HynYu3h*E#^dHt?CsmQ{#-a& z*BV=Q@5fMdTwLkk;NY#pL#bB}ZlE8?0LnAn<94XC`KD4N+Myx@m{|LqT$f6|oEU*Q zJ3Ggwr~42@$_JAFYXM;19>Q{PHrTEw!^x_ol|csuMh|OGQL7?ku&;O~JD)6kij2Vh zWYo(e8Oh5Q0x*2{U&vccyhcy`q5(g`3;=|=0?GS>K2`ikEPQ^-j^vWPfj6&rCRNmk1{^*^W8rssdAk&;f4`Uk} zvExOTjMo85S710L4Ra{aJH6_AdU`D2rlEXQw?3FA0U(xDv2k&r2%}C8$9{0fvrb5N z*DZ1L)l%>V4(>@nb9oQ2+0?cXI~Z@F02GrI#lAkbO8A*|K)A^TTAP3i&=U}35i+}X7a}~%YJN}oWX%ec)}F2xN3b#Y*wOZ6s@1!?%bMB zA!xXs_U3nc4@ZmMY4``0gH05f+5o63%IQLdP`<}Ke&yE3;ZpvqDP~I|Y^el#9p#YG 
zwuaVtI;}!3K>uqWP}0f+rZTF9&Qy@Q!1SI1*#+5Y37GzIIiCmvA%?u^+!=s2Nnj6# zOyw&WO`XAGWl<$zlvYo%EL52ylaY28Ju$RiQrDPI2Rd7>in?(&u$Y~r7xwe{5Cd|p z5FZ&|c9P2gb>narlffp#feJ|D0>D=F@3T|S05eAl1uY2)<|&)bW%3S?ij2%FGxY|k z16=F(~P)SyvabGXz1GYnb-B%U1=hnG&tZi&1`>ndHSTD$_r*ynj~thQk*M*F&$h znRm1et$@9+x2qqlA{Y`HgMiQVyZg<7&|( zDG67=W>u?Fn7ij__jO9_kU&pOE+!@Y2GqN_C`E=QAT5JSfh+LnN8Y>djE9F;0M&Pm z%i}13@d?a#ZB0JO%Q@Z|uGtUnyMGI>)$T1$^T$A!)nrMXCP&a++bE5^Z0jp_-&U!? zObBp*MpMyNmzbh)`saNIzBFsPWKn~nKk!>T6gVS>SL>~LacG&WlF9G;G+s#5*xB8I z5u9?KtPaN1DsbN1FNP(m4Z7SL0pwhVJ(x=+Age}V%;lfO_+?E0{NgHa8F$Pk{OI#+ zUT-_rcFw)4jBjDSvyzmQ zoJwG<^G!@0P$DmIO$7AobQ?+y7{@ssyjAn8OyHy0O(k)7cNpFGL259P{f#w;%52mn zD^to55A$^ipGWUCH2eT54k;yaIYqelgM^@~3ZuULMVl9~RnG@jI-PS0ucs?%)xxwb zEI$<~%JR-$p%NOd)O1W7c;4r}hYo6kF#&2PA8XN{M8 z^)DXRdj`&5hd7lo0Tu=mmdAb2*tSE^a6H4lf8#3qOmjyx`|Q-|6PK2mLBaoz43WxJ zEi=rXv9_jCirdCfwJ_XcY$*ClP0r5Vj? zMb>9q8;Fi4ykKlnOtxhiUu!7H*>-gIOaFLlKST+*@!0)8w%#(Vj%AA$4H_Uwu;7y5 z7Tg_zySw|s-Q9w_ySuwP1b26r;2QjO_CDv{_r3Fe!Pm9gYIavu&oSm0JGY4|vt%Bu z@#Z5aj(y+*NNSfIO#QR7X@C@SQ7VV+&S#)F<#(d-^R2rNjnl--2MS-HJV?CbnTHP$ z@+$Oncbf8hgbq=+V!t_v1Z~Fv{PDy1PgmYNF8=HO_Ez}R0~K3SHlUgO1`zT>*TAPk zSsslSad|(%QhrRdoRQqrljk+UA=sb7TRqr0->gK+A*LNa=9*hrPIL{1 zO23W6CqPM)=g^jv45KMma1`x6yw6nZGic?Vjw7wjclcDF-xQCv>NhcaOO#@8H}=2oN367y4Hc; z*x$V|UH;)}jaRUdyQtKjH}$b5=JvRb%2hVMxkd%on3=exx>acZE>T_pEyu${8&{~g zW?E7V$cQ>K|Lv2tc@+^30M_aD6#D?Hbeg?Zt*@Zy`z_$+^q22-&QW!&eMADD28^Z~ z67G^9kO<1{&ZbPS9!QEssX9q3RVwoJdhUM90J)#>Qkjk>Hl(l5w2}?79bXriKG~f7 zFlw1z`&k3yz*jI*zJ;dwQEF2c)|$|Yy4E-YykVhYvv>(k+U%i=Z%E4H-`gcEWo`Kc z$5r{YKjaalKg!a;vRCHOW5X7&~zXdc9;!SbGGEv#UpZ}1&eJ6amRh7wA1+G`z{{>r|r1d}}? 
zGoQ!;kX}H@eTo9v5nEfk)&WEG?xt*E5tHCbk;&U#K6#G?PI`rD%aSJB-43iGH^8?Z_*81Q}>dF80O$LRvr zMp1y5$o#n$CyU%}VMU2?#gpO1R`;i&bwhKI$4@+L4vTmW=d%+dG!*4^i}!}c9K{C9 z%?N=OE`2UmD^;Px$;=tVvPsc{APlszrDUpd8@zm?;D<7Upk2+!MSW#bzhdci@`SzJ zsUFXIgH<4K;R@9!-W-Uj zG80KM6%&ywbSB#A^k^?m=9Vf@2C6xi%9|mWI^`LkPDh-?M<&0`)oh*6pG+)#ZH|x% z*DjN`{I&ml>B@8e-QnAB%2!QtuvVb)1r{EhYF^I*`wI*3O8=Jfg zv!vsweuMJ4qgY!0mSK~R;>cfZs!)ONS?w;r8SlTN9~SsMmv3X!qExO#>8xx#E-srn zo=MqQ9sem@d~})0?ytor@cEtLy27nLf1czDaAXdV&NnLvq;&xm6IlH_#eY z`}fq~)Z%ZQq~+?wjz?>8#**$j6vFtK+9$Y*;7J{!r%wkLI&$+Nw*2(s;xlTITVGh8 zuC74i3>DMZ<>^cSWCj!~Fp3`K1-jA2Qcg)kuM`r06oZ7%GcJ|Fkhhq$DkWFM-O?gTLRNUy=%n6TQZ`^=r`hA z-r`o;nKEp_6GCdW0G4#vmzVPU#Zs*-eB7(%l^`f&WwGkg;peWjN7$AY_u^}H5};(& zC4O58W zNwZ@1n7Qk%^R*dhS1!TLO{u8HBvIDQ>wChXyPI9Yy<$&KLLzBKKKbH@C+d2fd55dK z6kFWGL1?u(xh!;Ka{9Cx2PANgTKQ|~1gBe#UcHqxCH;QWZ zaS68G2k7a!t;qQCMd3bYcslHZGHAFF zNgf+*kXG-=I~{=NUd!K%MfM!bl0XanDHj?U0CTZd>m+G@kl9~%-GM97TAkuPTiAn8 z=LOAI7R9G=Nk?7|2V2iP$B|7f0J4-Z35CVQ#_BcTSrV=S&y%S}AcArQ0;yNl*MT^< zUxHCwCGly=$r88gtXVT_I-LrMIPY)?#qZt0idcP3vW?e3LMYbwoz1@joP#QE+tz~f z2Ia#xYkW2!HX!ez8D2}crP|@fG4OG9A++Vq+>tORAgzt;mB}2sLajydY_M~Xt0Y*_ zcvK9iL!ZCDF1Rr}iC<)+=&`u#(3tdfgOWYlYO;}@t2XVyeR^#ofc@RapK8Yd@ss&g z4(d^=VO60>@oclrztHpk`|E4c_r`p^-7kAO^n4_JKG!j0;)gn;UuNQ)v~z(XM_DDn ziBx07wz1ih`Bkt&U7gOF#q({L_|>@&p|9qAy)I)^s?gC~rTCjvT0_p0oD@PB)hBe# zjc#cwlS$<^;fOR0a%ZwG0Ou=V99;^vi`QoF3Z1FipZctmk-I+D!7y)Bu=7~wO=SeR zkm5*jXmxy^I2Xor+zZ|Kc2w>*#E5#d%t&<0Ma zan)(9|1h;k?|y{!U1LD*!F`_6yH4EP?WULGiN&d^7Mt7N35uV-^x(82v^zFW&A#4# zPXT5v3{XL|r`2XYuKSx=LIcatNVSYd68tP{r&~jA=FF^W3W&$&l>cB>(_3!wWSno; zbDg5*3VNkmi;N0W(xC^l1jKak*lVLc``36JUjz-nS(q4^$XVTwOM=W;7eoZ%KWj%v zSfos?wUjC%%6+%`YSG{`FeH0l7Z!IwY0|uD)5bD{lH(oSce+x*qc&}(QxjZ^(j6N& z?pdo0_q*%r(Ywyh1}OjC_X>6)Ahx@_zfu^`_3cs3{-N3FA=i>ca@}Yw<yp(VKQYI zlHolREMd;M9ST}mS-BWU9lv&tjX4-;&0f$gO|6VgE;hh^LWu8tx=TZ$4%8b1A4$x4 zY0yXRS%F>@sz^8f%0TU0)Yz1NTyA4Z@FfPJWLhzdhADbw!mFNaR~&4ycH9i(XxK;i zr1{=h_2a$Pn3&QRLRC9IJZ>e==2cHz&0o*0j4d_{u@P&Lq=&U}%dcm~0+7wEUsp9S 
zsThC36u~l-2Xa169hVa~ZeDI8d0`-fKXClb0NzoNzXCcTTa~0A7V-Mm;uEuJ)_V(2 z59I~?{$Q(0Zs9A(?b`e`xL0DIfX*eskkyfF&){&4kB)$^R0Wo@^0duNc=}DW{X^aA z`W3pKcv66BR+(jOY_|K;+39KdTDCk}TpH=La=(#jrPj{|v+2?fj`=jL--JJV_^BrV z=zsnp)*~#e?U*Az-9w3j&52dN4j{}bN%ag^k1bs_F=@1jD{%nwuwLHjJ$4_}zrh$7 z$>=PN34dvL-;+9~vL>wFrmspxjv33Z~S*;0kb<{iI4_{nA{C z^@&zRQ&+`1OVo|M80t*4Ek8b{f#R?v&!m_VVgTygbdaJP)z139Z)hTLG|^!HSD1OH z+CynPW4sMfM7^pF9AOge1KrX>@tZ{1790gkt86cJ`TeYRj^_g5Dwb|7Tltyw7JBmQe z!=p^#udmv2#fA{IKH+p7LG3DPg0QdI7Nx7z>zNQ6TO3PoDu#=Abr+lqwZGSbu}Q8x ztDeTQMjB5NQB$P@rIb}zjb3(*!aaUR!)Zck4zpAHqJ?*Y2KObpkqwEwJF zI^AZR|E^d-;Ghf~#)Y(^EYF1YcB|GJT zgj){wpMA$`4OMpgCrc$sC##P)CgcOXZmg;|By4Qyv%)4P2JF!-Wu8O$xEIJ{Vo;J) z-ZuVzy0K^dBak!FLz zSR(nfJjTSIj=V4c`|f>T3Em!>XHM`fz(oL@ACjl@GlsvLJXD5LYTS!E z)N_-mfx!LLwV4MoLDtnJSj-1P10FB8uO@p0pNnU@c|ZrZS@Ro0(RL}Q#39(OH4j|o z`C`l6Ov-vD(6S;_gMYs04?$+1?%5i+>oSAa)g`9{5E|2Qd`!&AwK%+fK{j^Bl6@t8%ms-$E0^@L* zf)6X|C6wOI&)Pa9HUyguwQi<*lKQV zl?944nk+}%I6O?N$N^^Shlh_BcjG%hk4Xq%t07S=c-S^z_HesVGWuD1O5WW6ki zVYpM9(`EFOq%%`@M7$*lxnQK~!Z>IhxZJ$O!H7fxfESNYbknxC8#ydmq77l94gmF*?uTs2ZgBH+SqzvtB5 zid^hd6)K}*;c3Lf3mu`;aPvKbf4erA8a$O6ezeL9e+%qebTFA)^wv_I#ut`(&R=n3gvvdBa;*1pEjR+}rvA3* zuT~@U2fk<@;SpiNM}avhV*1PE!|BSPJb|;ZXtlMJS>i!_J{9%&a=R;y`&OzEJrcMc zSEth*iZL~)Sj-c4;uEw9K)|I$N-p$+`ee^(H7%A%T%r9)ix4t^YUnJ-=z0Rc!eRV*N)eBze z$K3?^M6+J6i5!l{qbT`yilIsn3JMBn3L&p{=?RMLE15WBito&4oF_{KtnS8T+0`Oi z)di{CyQ~GZL9bScG$}*cqu7)fYf19*rW@(JC5(*pnWr(*M8&D&saKykeN4wuQe@{p zisp>LI_i6;OKUw9U;Ft9#uFplQ71pEK$oPtZajNNyjos<((ij49aAjYXt%0?l~egH zpX5KAfUU+Qp*;rczRtHUnHQ?v1M(j?Y@7#ch)gdOIv(;&nv^?)@Rk$RwiXv{6OUeb z!R``?64A(H?+j>HWk#@?m#0~}>rRCEp3}4Trqwd?r`(Q@P!dDvP80V6|D9G46&zqg}q%tZ#RQfh$}y^sp+KwH#9z z#h5mAorW+5Q7I+-(B1_hGh(aVD&IkEjWO3>%^N}Ec03<)`u0E)CYHLk@y9MA#kMK& z4`Aq?9kt;t*jDe-?)3^PbXTlXjq&$QUUR$gViP9&5NSA&k0lxN*#Bue85aj(77r+` zo#B%QgAF2b)G+U2Ji7$a+k5f(oOCvY@H7>SXTBL|?Mr5jZ0cAm)4{B^^DL_Kz0`O$ zCC5@;_z`J^>f*t4bFjxzk1iS#S?j$?ikN(~>8jG4@XwQ&t1u=IT|v$T*2@_;*iLA& zbA3s|U&@7C)y^#T&bTeZkM}h{e}<$cl4UFZCUmiy|K;XVdJxH}*oIBt+jZJ51%(m@ 
z1{~-yT?jA?uVEfj0URr?BVo*wkBmFi_Wq~BZJ2_#%6$=LwI{e^wM1hGCrt}pCM$`C z2V6ZV?FVKVW>Fnib^$H?uaWwJ_4g+~QcopipZ!D_%^`6zoedV`-m^n*nXJcL6G-@1 zDi=1O2&Trd-^RH@Mv9b(-A~VmnlrqSHJF8;jNgX7N@ec z{^z|dGGgE&)%?5t-@#3!y7Co)-9JO(lY`SmdrigTtz2f)M?XP<{j=4M0K9i=uaiCr zA2@EeCm$hFxd#EAQ7{XaE=<1LKR9^OAA?QA>jans={BMZA+r7dprwR zhEvdA|IhACUxD4NaBPpOfZf+HAh&iF)EIG_n@@Q!<;1tf>5^VpWg6<7GJ+I4-0+_6 z4yzK<%uTSf6xL5UW<|uP{tXEsOfnH6d?fofJDR^6_(%z}S(Y4H>=gtJro}>FZ@W}B zl=$16EmhiD1W&FfPkBo&e6~SN*SucunL+S41uT5!swI>ORkCUYyxu&!6UT#g4L4(J zdkRzvkwBLd{&P?#5kAcHQyzgC9^m4C#lYw)2(F$YOk-qqOv>ZOVdLb}M5O+hy2_g_ z?Xop7aD@Hb3x@`u!(R6`7QKN_XX+pQ?{R>ca`p3PT2PB+e?iy3FJSk?#XXI#s&=0w zj8}ijE+8f{g;D^+Ii7}@#dU(K_frttz)b^>e|^Czt0_Td2N;6?MirQUBmd67o{$Bv zz;U8!kbT^iq&9NausA%9J^*0)%oK7S1v$Cut4$d0zvK+u-=~NWhKm{o z#{Z@|JpWb@I9M$nIBUA04WVty{cpukc2_@e2!C=P?;>e@6}<^@gY*i{RbMg$l)uCO z-*ca%fYtQ0NNf?O{c}_IpMB8wg;yNy9POKu6r+MUk6PYe3UoHdVNV>C-5gl&MDhPm zz&XDWqNJa)QO=M8n_Nsk`w$Np8rXZc2DJWgv-hq}RR7N@YRSMK0($`8BO)DwO-cR_ zMy`9KCJp7G36ydL;-svl%BW%i|D5~*7OXh2pytQE7xIana>n>{+vfa+qi19zYyA&1I4Uf!#k;kraQ2*t2Z-QP#o(kQnL?h$#3yCC ze7gPd-+C0361`jbB>A#(cKaOz0d{2IYCP9MVPEnld7k}(5C~!@U}TcnLPp}h8#I)E z&uEidtX#kS19)=DAJ3#b_fCc~+(D0q5b8pKVWxiCmxRn_NKAC18O4d>kX2Eh&d$jL z@M9fWX{7XuJezHO#ROJrX8GBljX*hvxq)n9mP)017#s#oWm4KC5rO0BT%35~_oCqS z(U1bKbqUu4*9GlYuTAJmW^YYFt>bn-uo%&qE^9KpJlrJnV zHCKxLm$=62J<4!)&XW={k*@@ge;zf&zV;w-f~!qz)dLKJxFTu!z959*KBC_(4-cDJ zq5$kaHB@1_dasxxAoR+?C|wH}sGBCmUtNBa%{P^MdTS#nak-ibZi+*4ot&xE8I>pf z2#k5ZD0};Ll6?S+_J!ihXymEGM0{29OzZK$&L!hfrYc#Ls$!jz!Es+Q5OU>6kKRWaf zK~1iL4vW0KWV?F|$Pr}M8jZ;5>wg2P93`tNg~F5-%T?*&kdTUeA%dr-^F$~pC}RAU zxk0N=X9>iGh9&Y2vD|Q4Ep|ftzo+XkESIVyu?h$0S|n5e&zhX2rCEp9R3L*XDohT) zf|Jq_`p7FNC^@=#ja=SCP*d{F^NiN-==p+V4XE(T4?+P))JpD$H$*7IZG=ND#ETeATWJDT9~^>=g=C(A1<}v{gy~8#c?}h$yC)NG+{q zf}!d&(U&P<6V0c-`xKKirZ{gsmkzSmwwAGelg?mR_og&?(ixUFDzSPkM8iushsG`C z(rQE6<05-pbxsLC3ml1v^w6KL%`C_+OX zv#egrBqS>cW8`S5fff*!Eam{w+c$e(NJA>5)Fxp5UQo=@X9hg@6fjF*>Uc%>JUm~I zHP^7X3n8QUJA%f^pDumjEq{RPZkD@h3{v`Cl+~WBC#I(tA2MeyQQ?^NjDb7fP7`Gn 
zfr8!6*(U;I#tT%eI?27V;n@pFloHTGE0rxCxYv>dCYAhp z7(sXBCGzbs0W$*!zfn-=+_bgMes9L8y4eY+cpe4-2tYvE`iCB9Ar}vwQZrs(IwRZf zt57lfqC!yCKzuy$D-Sb4L2<&>n*MKo9zw-_JI9+LolmAX~(|0a|N-CK<7=d%sSDhAG z6S_JS$NQCc(*Y+F3c*ZFIO(u#==<<;RV!olYTG zi$q{cWpFs=_)93!-ppo&2SN;UaxLNr1MN^y7}+s0oy8ATA{Spl zy&l(p!qT@UydJlVbfnMB#J2L6!)7EFR&etnMETrZsd4L+YDtbjk=R>>Qp? z0VA8<-FczN^OPc5+-^vu1dVbW4mMSAm5MFUcc+X2J1+1tnOp+M6&&(fR5NVUEG%Xl z!c@|DzZva!V~3d5=^Ce3hk}*xvsB{D>Ex65ki2gxI(V_*wm&T;EmOAPN_=@n^77+` z*&y7~?$WbASs~K88&Pdic5+$1Zr@Y{a}0Sd5w3WG4EK3eGe)D4E2?}g)_Oj7@Wr61 z(iGdT@e&xC+Ng_;V!gs+e-dgG8J387b}n&maPHCr=^LMU+2!jXXl7b z_s13xI7-r2@1{e-p_iuNQaeHF1qb(=<4)`U1V7x4x!!TeS1MQ6nYp%L@AJcGR4F%y zXpq5RR}c^~EpQvFe>h@RowVix+g&TQG+F7ij8nI%erVTS>(*Q>SqvEE5grhfnAs)D zn#BmAD;aoIt8)_M7<@&7fl+*AU7$E@K>}+kKehW2_Jz7cb(HFdSv_bU?qsovx2qZ4 zwW0TRMtI%zPv7GR740#VMiei7=)~XvpzhI8p4w+!|(oZCZ`u&=G#fJp(=yN>2!dkfd;$@0#GWIih-cHVljYLTEgQ26CM#! z;Oh@P3%s|AsYRLCLnnNjGdn_Ib9aT~X{J-%zTyKD@+p)t5ZcsoS! z=)EmxfA7ln-1A8&6&_wC^<_h;gnTB&FB!Ni;3 ze4XPchcS<~?2J#ssN{u-OeWoTjaDD&%YNwi$yo)d%gCFNOQ6Ug1Q)G=c5oSWL^Ti@|Vs4-x zw}@xVOp2#b7a_Z*p= zS3^Jjb+a=NKC6<|RYbSi>B!x2c}+|?F~h<2aHL{tYW8F?vI^1;HbIRo6F4>GBY*f7 zZ3K^aYKdJN3OJN^RZMZ)AB5{OrLsn3FWVa*|1_|x3W-7nBxu;x8$D9+qnado@LR47-F z%5>iBmy_9C8giC>F$+K!D<=#J3NluW0MgR>$nME>Y}m4PXps2&kloo^hrRF;_{xC{ zC6z5lW$WsT*0tE!iqqYYYdiC1nwv7E+I8R0);qN5jf$O-BW`x zvDDi#?}b}wbuQbI-0aCnEK+&x7#Fy?Z-M}uUs{Z$f3tsgF_#uGPQ5}%OYq?&lT>|N6>kuD`ZgOXN9yPc| zNRS+s=^0E(A6tJ3g8_CnxoM0Y6}mS&RDlAQJa zwuCcQ5aDvlw*zvJDqmJlnV*u+ms-pRtsliV=YzdbiNzM;>HWoefeF-sNYsxSN)Omz zEZjZ%Qsv{ob)Ft>*Xup{j28atLD`g|1_#gd3mMZ$zLlD|lqK@c_o$kcB4)dKof>Gf zIMT;iMG9LP!FB3rrS@_;hDq*l6slr-eK71&^JWuE?vJKPwluo%{p0BI~!r_j^T%vTq^p|A7j^dhoolImB`=m4(-*7SgY|WDbI0gau$D zE4>69U>jO`g=To8$<3#CgyI*8XdmnF;D4MBixnnd8E`*%SRH|Uy0TEJa`A*=7Rt2* zF=z`Vn$KILDYkO>Ka|;2Qx`cv~x>B-%!vE|d5##sMVMI}raU+0T7OQA0lR2*3Zof^~k*ln&!8j&uT zAF^?^#y7s!%Dl?klp}HKOHutpS=%AAUX5+V*-G9+)2pLNeb;n-vUGxN-GrbD?66H?}Vn*e?(y~ zmedbSw_;=)8#Yy&Lq^G{>T=PcPqpN{l@0Taw$nwi2ytTQgHht4W2>%P8h}qHfin)- 
z_?&rE#Gv31OZFBmDu}Hg=47?`gGSw%A>LA7+Lu!iZT!IpT(JDfxI*^KxDC(R`s@Q{ zuk@)EMZ_USmZX4dgRF*;sld0i9`25uKo)!5B!NyoMt26r^D&RZ4oxsHka8{A{&S zYK1H%klja#8_wSgm%;d(pk* z9a9X2T_FvhSBGXT^3%IN@q~RDn?m)g>CInbuhnKe`Oue(HJhhdj|V-rF{aC(dcB>m za>U^6&sR-GPiAmzqUwnR53bk{i9;;WxPl-CeG-fcLC40aM)M1M8wZR6&Wo-be~^WW z?nJu|6p}!FToGo}mL?pbVFb)(VP5!S{Q~mF5tnZ*-IDtr%4Og^bX`U`%I}T6Te#tC zW%-ib4TdEQOoxpp@ve{aYFfv*oUl)KNmaMxq9zu|PwNHl-pt128VaM}278%{T4KK6 zHAPj{1BsxeW?-P}*d*+k1iJkE1?we(BTfEzl?*#^B(wU!e(*(pFfKtQCXD3zq}#ht zp;qb@jo4dh@w|W#UQ@DuOk^zYUAIUZjBQ1RoS$3m>JRy8k04ryd-e$RKCb2tcPrD` z)^WQghzKd}g!ueH--C9jKYcD`0_vu)j3-hbhj>W@5Oj<^KX8>pV0y{ecW>g~g%N}~ zzW2rg-~Xoabt@LVV?2wO7x354czeFk(Y^{@^aeD3Rqjb?Qk&}4&uWb%N_ff|d-|{y z2e*p1b}6#1C4RM<&V%%D!6uFNN0UsT`z1*H>%gXbz#x`G_nd>H!1Z zS_7XjEHBxr679Um*dIf6$nOJTS976c9haY#67W#EZI(o5JjqH=&;&Wb1Y9>4pB9K%hHZL6g6per zNA(aGKczwyzcBPryN+s3a6R-1eP9@x!iCeR$jxb(bs21a0SlqUVdVHoH#bu~+HS|V zKEv%cfQrbm(1?BzK}y!tF?#+aVYu;d)c0{Xj6|7E^rQXW(8h$WcO3u~Gga9pj`;ez z4o!Gs60kO?5hdsTRzCURCAsxby^ak367_>XA2wxw-l4qqBXo-5Xt%|OqMdSKlQ7x) zjdf1NWsyA9^>e3X)$PqXb52Iy*LV{Kwo=pn%=ec+oK^JtG36kYQ0`WN(g0YGkBOPt z+9oilf6OfuW+F+{y74D4p%JhK9%QtX!aGcIz*qTz#arq~5xpv#cAFJ;ALlvQ$ESVC z=ZXTTN~zSQgXejryD~=iS$~xubWgwtwszCalWX)*nTn=l3|js2b+hb3)^j7kEywuN zUCXieYk#An7VN#G^K(1qGcum1PlNSKLXk)^wQf&KP)aIg`&7@dU)`={xyXe57jU(2 zJ!T8ke$rR@1oCizp9P2n0f!~s>*?|14Y?!2YSPxGe{uTD zm)X@%={J)WPWqSFQfc?B2zVT-CEgv62fqRTUWK^|4WxZblItz7L~74`jJwCfEM(jd z?RN{D7B8dcbSbVWblZ5I4TJJ2yl=M<2)J}VJO7-Rue7={j_Wl0P^ygl@m-{9_PjnW z%Bn4$R=7QHL!A%4qjoQTydI^U1C_X{w@j?Vq(_G2AV&ede%j}q2>7BGH8(G7E#4b) z&crgIoOSBxnBYf;v}#-;QYTq%Pt3h=ZwFFkO9-}D+&T_({BPgWWu+kj31zV~mhze1 z@(f)8IQQ_z)0$E;nOgdo%?|TVJ%|HxK*>VNb@A=YZ-S|h)FIQP{pwyz2-G^*-O$r` z@ek#{7iW()1M8N{R^<{PQA)q|4c49j)$}8=iJn}x!Q7F;iBHTX3rsAn_}N@4dPX>u zjyI|W5*m1m*Vs|jeLgDfePoeGGv5rjCbq%dw_lFuGRZ(4OKGWTZE1Fy)t;-+H~A+k z@|)i8$5bbS__e9E=EkPT-A5kxW1BPwi{RTfA2K_&Jp#LwxO~G6Fxna4-_*hyUDgJ| ziMT3W+e{+|Tp-Wa8s+GFX*;>(%tX?4LohJEx!0Q?k~&SU%SOb}pYhgVIiIg4UsaJ0 
zqKpk*@OoFQ;__qHc%9iXPw!9EDFk~jM>v$%z_cA=_L*st=8MYI35IjI63VR#0eT5@ zH!E&mY-ECyN`4p&rmVn3S=X7om2M^_o|C!1&QNs*qi&rK!oQ7Hbqvj5sxTumrXYin z)T~;$PlBy1bu1FzpRlTrn`}KjMQs+Ax=W(#|M5py3^n?+9MS`$?CB8&465{KMb*## z58i&3*!f{*;m7G}T}DgqaN!tVv;n+$@*;+6@YL=t`dc&^TmG+#(-l@1O^N1Y-77HL z#&JN&p*+t*o_^}u&DHfvf>~-)f&bHDq?J`AXaK?eM)jM7k^Wt`rplbWwgKjkb#bKh zc|L%AaT>%oj6xER_T{XX()!uy%!0*sdS?KXfB{|V)uV^z&ZPx4Ydk0!+Mka=DZr*X z@OiQNw+}}M!_1+PRfBTtoz0TA zHp}cRtIXMG*%e(kYZZj2AI)dwtnuA?1|ZpbaqsciZo$B!?qw3y4FR%ewJH-^9v~*`_R+?1o!&7@5VNH~`A=uu zb|jijtYve#L))5}v07Lao0dqLq~O#;AmKW!1^$CQ{W^wO(zlvol|UsEd9t{D*%x)Y zVY*Mo$!}s={*i_9eZ|SkphU-|X*G5wxoJAZ+~l%)&UsvUB!vTTtW0@XnBfRT{_;6z z0d2YzVzr98=GR&_BAw?#G7im#x<)!|Q*cg&3BTt}f9B$zt@7aAaa4f%%x@(=#_UlzWSe*XoJt6p~;5e6POhLVwwLr*1vy%3xtp44j|V4pB6j#znFD$PHfPBP|SbM z@((`i!wp<}O>lJoKPdUXoBq8)zDWLVnxT3{?EhC6{`Y#iEg^e*dw=!y`ClK5OPrto zX^Bfq!-4fDYWn%Vy~75HkHzrQ&)*+FReyp*Kn!ngW_fsch$_z6t;ve}{|8t9zxM*D z)b?(v3JUlBPYdDDU)Vbmhrz7+e@>$+$cKq*{!6s^e_Hm~{)Wd^kQCv||EI?w!OZ`L z2LTKA|0g`bf5T(-E5GNzH~k+(8=x{E0O0}r+P43vC3^Rt@F1ixss2w8%KwIk*^u}z zUE=@V?B7Si4)H$+cYXA75_w-QFN=%5^*;9HkL><>vSQ0HIEYS8BFcFjsxqK0gqC zf4FZuxqjI6%K7E_Twg57-T4H-2%R4I^|xK#X8WQ0De8@Aq<;wn-F@MFiDEWz=#)Ra zyxx}K+WsPhx^Lao=BQ7nzjxD&_DvrA{Qdl3c>lhpHt@df_}XZS#l-uxnBtG^9>+i_ ztOGl9E5L^^D`vIWISChNbSwFn=$~p!y`+we`S_JSGVWG2` z0!3`PYFh7ZD?^;ig0H|668-%7^7fvLpo`F0K9-z$|J!=|@fznbDI=GzQ>Bd;Z5H3i zq@=X?4Aj6amnjpVLwtNw)hRcg%YoLzj^@)+lB~F<4>IiTBe64}g z@q8hHL`TJR|JsPj{os~!#uV%hhbcdYNYW@A=apEi!!2-4jk}t^FG#VAp9nyA7=##5 z>up~-)Z3g8%3wDr_I)ACq}mYoc6X&l6Tj#1dOar$-4!ZB^$ZqiJx40GN#lu2i{;>H zG|g;!zs2z5eWxymygT2_Dpf9%2GUluhYCCkHvskE-!af0hKmk*C_(5&DvhN8sJ@y( zc6{ohRr|JqV$_Z{DD&jA7iD~2ymorR20IX#TU0!Zz@QjN&++(w2m%&1nX_4 z!Liw|#Zq~#zt5XwfQl*pqH_yZBFBJmIpte-n+wd?WXK?{khHBhU|vCqrsDXq+!siv!#v zgvyjF3FkhZ%-69FWQG(t{$4myqf)690RG7%XqJ>f!$p&nN@d9dOsr1xy8RM4ebv1*$0Ip`H=!NfsM!v4%AC=Ny%)WMP6}EFG0D_=D7B~ zSTI<6@8m?_24k$Kxz41-JjqHC;5MvO>v3DIHS@pALTt9&yyXPl;m99f+Jx%k-S1N+ zO%Hr3yiac0+`vpLN;2o)(i7=s%_eMdcafhNsB^d*DR4jej+qyq@6zgZ#^Cz=k*Y0Y 
zFW~lJksIABDMd?Dv)D^84(&N645P$zNZL)T;bhLp9YyBp__@ST^6=z9-0>uJZXlX1 z*NE!+<{{6l>n-)``cB}dyIMGm@8u^o#I|O$jSxzQ4^_hB-@cK1%X&S%X`t>F1eyZf1u=utHrMDC5icF1#;ep0d9|@N@%w9IhTKllRV!(lORem#}Z& z)T4_;cse9+;te;Z|R{ZygYh?%-ofingy6Y z+(XH8`WY|lZO(E7;t59;tebLWb?Vr=(;JvLvm-5pm(@gDXCF!X{bLz@Tj=$ngkg9k^A(Fe(A*4) z1q94)>H1cLc;Kka7E>R@_W}VM9BXu1@Bd8O6JIa+h}jaTlyb1!zp)sP#f>zhN=5z) z15$;6RbtR;|0t3fngaBXY=;S~Sxft{93%yh(S0Gm3#O+Yltm)l}(wL2qY*wcydYp2;C~_*B zncoQF@Z6$~H}JV`sc7kYTP8msZsD&Ok(kb&Hv?z@O)a)#V|nuc4#T5>%(qvb!PwHv z)9556zLf&bHRu#z6V;Q-apCMpZw?3xF>!XPrqu)geHp&w+(`~`wGL= z598wE7WYvm{A-I?n{h0;iLrB)PMg4Ri0vK*W-q1UhQr-;hgdK}o~8YJij=+axU5jZ zHdKLL6pwJ=BVIyq!J4po%~<|SARR^`vG>)uGsw3@^D(qNx^^~sF6XAfd||4J_q}+= z>-LTK>8i=BbmBUJQhrVR8Kp7|_H@s;KnBd$cdo=%^btgVphJ2w<uoY! zjmbPn464$wWQHjgV}me;KXq$MmvSUblF$^!6eZP?+7XFLF1B>BIA@2l78}=ewXvc# z;HblSGY9KExzm|a{_Q_&N?hSBUph9U(F7$*h3H6A$SCvVKZ49>ZtB=)%XMi2f_-Rt zV2+T*wlQ|#pb;=FOZx+8`ZVh@^KC8$I|6s9(JEq!*3OA==NnWC7<&@QSVRyWW|4$Nn15@f4yRc4{IF#2P6T_4 z-6r4VrLnX~-s~nxkt_d4$l+#o|2p#_MDE|`8OQKaF3Swg16!x`@xB)@8w$m&=}H5D zwRy>+(HW$C{Go;bC+GzP=;ggHbHS|rVYWDfz}JE)bU>g; zv&qb8Fs;FAeN3QJ{5APdLaL#Q(vAMB9F;a+%+O>us78N)0lQxkg_uq%jVTYvJ{yZ# zw||g+ydjIO$)GQi`3?Fk>b||>D!#)}Bk%IkPtVP1VY=b$i}w1MfLgN6cW2LT>kswpO=Um`A^=#1gmTvv*HBKb3F zIR2^r0Xly%y1W0HQGZTlq#!R^uj?#jkWpP>pd5Sjl zoI4QsiR6T$1oLAy`=`x%(;jdm4L}W4Nm%rp9dYJwmggRE=2vlP zQDDy-n!~{1nm-9gFUpK;!y^n{*li*CE z*p9;o3Z>FanaalJb%~b5mCB~bvb@TRn0djZ50{R6=1MkC@3pSyCaosC3v6c}k@PCdP#yW{r#SV*U%4}v2nZuVTZD#+zW$u1ZO@El8)TPg7R zp#=t|iap`j2iDUuGk9m7u!6oZUDfr-g7e#ZbPUi;9lXHD?DJZJ)6lUu#xHz- zEV8qiaqj-W{S2m^+%j0&7|rzmaQD_hc`n_zAQoJL1h)hWPH>kH+}+)RyE_C3?(XjH zZow_MOK^AB`;e3Ge7|$E7L2dabqjQyaifY%geOeRTM8i5A&Z zvjruRWeZOy&}`}b?~!n`$T&Js+=ZYeL0ziYt5{sWJUm8A#X!~3AHFlc=ed~MT@5`& zRIhf)B@!6dsXO^XkWuC@kneY~Q|PwrdQDo8D^TUWb6u(ybA*iBu01J3CmVo-mj&u; zZVJab1tRha%lF}>5g-)9!j+m!9pol9s_4;OFlz%kO|?5?(Q= z5;~3TP&vxX9nZl3HlISu z+F(BK>qD6NOp)!!7K`PknuOL$07WOuM)-B6gst`Zp3-r%mZjHGKLUhr?fn-J*J)lC zy^(iz&>Vz&)5Q;XT;+YqGGUl!39v6|tzoTay$6mtcL(KIj^Eycyf$fce}DX~hjA3* 
zScm_~`DHkBw58AhoVluR?6u`&_NwR80OsZt+abBpXsT2p6~gt!jgoVGm6)-FY>hvc zjDtSQ@H+~{oSq@YkAd*GV`x;Ex|R4!7%WMjjJN^ZCItsJv1+{wlUU-h`u9rt4mOVy zUvD3>8uk)w1lPntsvb~212a?(bL$ww!spA4fy<>A%@3^XUuQFk_(mGA7oB_rmkgcO z6tP3_?|F_+5=AvI%Zf5>4kO2!GJD$~}TziN;lFSniNOGpVAA@c4x9d+b{Yw2w zoe`>{zFMbS=qJM%XFe`{iE5*$BLmm{z<7=BoelesV)qOJ&)c+IRw8301|^`$ua2cr z?XD7YMQd-!;9U{GXkrs}I(of8Jv9>_! z^92OQYTZbRNunB@Q5S^VSVpsBcjyE^`^Qv5?+n}i<@HGUetTqGS)D0mcrB}Ng_i4{ zKrIU#>2B`4ByyC#6Fh*cp=@P2(_ngEAGfM`>v#x}r_ z?cN`(3F>{9SZR`j311M#xBqFcd%y*IV<3Fhoxe8mlksR-LMLH~R=r~FD#~cD+X9;z zG64a1j5%3=G}j6o)bP7Qo4V=|K!mP6F9i`Z75?j837+S}7Fmmg>bK{|RmzuavfETdCawaZ_RB|xOHWIQw~&3<6(}^XOW6zIH<7B*fQWl2 zy8=`lyZw|}#AwOMCL%o*f;Vb?w~w3wC*=OU^*A(2;?2r~7bL>Il(v@{vY9m9K>gHl zmHH@-Wm~swiFXTzl*`@qpl>voSZ_<+dFONu+zHXQmK)Lbr)MaMBsxBkX|M|@)#aDo z7JM~5voyuBY6374u^8!0EHZHE>&`lOF%3+muY(}%E}35R2C+TC}Tkv ztwa4sPpW-Rg18iT9|(ceT}Yn9Lug$^@@B+mzr;#_PX=|pXkhUZ)0{z4uX|~E*?ICO z#UZoH+mR%$NuBLpRp;KS(p0tOh$&S_sP0npV`K8>(oc0Unq3gy0ifP3cP+bv?p9RV z2$XZHn-}^}go(pz9sEQdi4p@XnR(Pcc70er>?(%RxEvt;R5qLL@Gwz-xIG=M?@v3h zQd~_vX14roeaE<8Vw$R4y1pZa7sv89;P#}kJBnGkKJ~9<)`T8u8;&p@C0o@SaD8{P zHX5VA#s9J_n@<=q`POPIIb0THgFv}@(#aCn@nsE4v0WaZ>--_^e0;5>dUIaabkbHs zr}gw+^k%Wera2qBNt!p+sJ(4Cf!@^7yK$}cY>XGowy1f}fim?I$Pm?;sP9jI9nO6* zj{d7O#EU3M=<4xgLzgjO_j};IK;U~U_0J8HM+t+QMsCh+NIezv{HW1dxL{%ax;MZr z^9HKPD&9pgf?$0RZ!V|*J9b%FH3kFAb&kVPk$wE&Ybh!Ot6IS| zj#f)>(P=~I@XDvPy<<)sND8^1Z;6r^_uPv#ni0huu;qLqL*?%q8d#H`cmaazKd4-g zWc8s9mr6kRdkhCQTb2{$swq`Abl*lQ72+oD2mxYWrP;FhRHo7V_5QMb*AL@aXrqrO zJ0S%q0iZ4oOQk&`=ckt}SV}#?+h-$u{*yNQ@LKS9kks2N-~Xn#bv*H|*_fbgU7gAo z>CWtfM}vjy-u`;r96*(Cw0TzxbNX;1C0k^oZ3F0~7Of9Ys!?L>8w34O-u}(l0xGyb zX}%6Q{uwwq>2T)*dHA`u^J{Oz${lD8N6G?d4(=D8z4B)6A5M4*)f;U#kG2H9+r~)e zE4F7l6;*%II389zRPLANh`=_nE`g>;F(*g+zdlA?WDcq9gPAqwe= z2g1w^#I#s6Tozve)yrvV|(5j zuQH}%7^{b{5x~Z|!wG|Q@lkjcaHngTUkKTL;ci`=Z-@jG^FR5bdIKr8A0qDe96-r$ zyg(*p19kpq)gqX5<32)(hfS#I^2Xg7CuC7b)nf3ljkMIyWRIUE*)d|UpV z1kr+Qv|(xLVDuzvJe7h!yTiUn+FeGO@Ko!pHc*iDpSE~MjV{-kh4MR?RhKe5ySh$BKZ?7Yaz)4$=ww58No5Zz 
zn9BUh>ZuhQEWBB1*KM%G{*jyHK zY7ZE5z$2T@iCk z3@T>jQKc41S0>3agaijGjQBLERDXB)#_jvFuAw@=5TNw|QhhjzloX!_)Fn;!N!}4) zV&)KY9?a0l&E8Hfgj%0oU*;(v*Ehvqq3x03HC@W+d7T%{mI!sefkmFGB;@jio$LwT z*(>tniuLhwi6{hn6You-{U(iuYaiI59_GiCf+2KM|PHupC zv?>J*Ud}7^2>E8niZ7>)jreh6zV zAm({#SOrda$@i^@@9v-qj5=3Z^QXhJTV0`y&A1MnG`&|zjvWu`G?bSo(-q_@}^ z+In~1+N-+q5RRg^ZEqEv75A+$;xH?X34B<+P%2MAudb=vnEz3&T4`es7X8hEr(_dH z+N((Mqkv{fZ)kdcetwn4NHPm;V9-K0`~Czi;9C39p?Q;{QZR*9JnrVr?V9$b3|SBj zvJt2J*cpO@DhK85E1c4JIcvdx|+eZ!t%=8Y-;5d=Y<_-=o5k5|Sd~WV|SOVtN zD+TDQUe{ZF%C5R$AmI=&R>G=|&eL~#%uEz|9klhZK{qNt| zUGD1Ox1+k$xN17?Pnd&fG`a7ms&-zOb?S8SZ0^dF|K7%ta4Ir22OX-W)+Z9B+qC|^Yh5Ct_WrA_-2SI#wBBC& zQ2TC=xA4h=3dPeh@S24a1`2N%VDq@Iq5>0z2r~U4**&O4sj*XnImBdSBJVkd0VAfz zc|Kg`5H>kb>AO(%eOUE7p%%^?qrZ4#56FHN^tngl&WvB&u$FO01{1{2wmj# z_@~*2aR<84)W#!n@<6Jdc1eDG=ZU?Y&-S2P%zuc1(We3T27LVYou|C#I4s2yNB8YxEz54QFHGY7l@LYw9YM|~o|jX$RP92rT?ncBUhb!& zD)Yu+%7kSX_$+}7qlvf?Q-#M)aMuEXp)gT)uW3WIGl9nN+#$sb+Ps#bC@)Hi7iZ1f z-X9z`FBbM1P2$c;&pSvhRc28%cbU~gN=lCQ7JEn&8Dl(& z8G2x@5#Q1AjAuf$j)f5o4SThCieV5w+#JtKZlt^W&Xj0&!N-b-+6j^JdtorRNmD~S zFV>gkV`!#*=}BaR@DK#sXmC71-2V}RPi!N&hekSCtVjh!6tMw`(sU_Fo1^p07RxPR zqkVlcvQZ-IV8%jhamVH>4WgE^x)TET8})_zE^uNbf}GDafN+ya?H51qPYIW!FLs5a zBym2qCqJXxp4Yir;a$n~F}QmrdJ41Ea{FH}yvc($AOwp4Ry_rMKX7N0 ziV3V~rww{vF84NSk{3%Ofu`MJMw|;xj)ZUOZc29aTpIxbPzyiU;qWc?qTa!ibPSeS z1ktTeXRrGuOM3wCVqHl-_fx=yYniVJZPzJ{W}Th)v)hhdAY{VE#_;JjGs=I%Z+vL5 z_^9;(k_^H-NdKR?Kb)R_8X*#`M~)@8Jo)ai7w=|-4U-D1Rs@*@t|=hUl!a2z1t8`z z@F#qExW7B2^_mWtm~NFuC;qGrL7`MClJGOuD3F&^%0(~HxatcAEpP2U1)l-U#|AIe zvtHt%U?u7cJ}&RjlFasUa@2={ zE}iw{bP^J6d}VKWpj@7o#?gP*`A!mp zn#%?NiO`IN<6)|=o#WQ?Sml*rt|aGM{aQz_Mk7}h7tsnql_;<@&XG{*{H~mp+9JA= zWlz?#CJzvx$s&@eNaS>Yc8~J=Y(h>p@M>>_6*~^ST3X$viN&km$$9(pxSmOqSnmvc zm6itOr^za%La;F+MGOH|To_U+=%Qe;pP$L}tCC81o((4z48=-&C8bCxE>>YjaflID zfWdQsiP;7Atq3Q?Xs??}er1utw@?BsrmMBN-YB9XVTVBz$2{h37)*NXFFL~^t1y@{ zYuTc~H3#n$bD|~88=TXRI$1Dk#kaQ%cDJV0EJ-U#qm=LuQ3GKZ?f8N_u7U=JtGp6Ta)`>K4nCE|{I%cDL={@t7hYM+xL}bmQ 
zBN#$sP>@|)?{QGeXf8y(cKFSAVVhZVCBO#-goGu;UXqJhFqvgJXIj?(PTk%5ZV!}Yyyt4PI?h?R=+$g%4&~`>Izzw~+NGRVWs~MFlq)ZsNS*CU z;nD@0e`Rts`f(UY;PuI}(FylK*!EqAQYbOOk#W=p=4u^$utiUQ`A42bCqW$r(UH?3D*EpYX8i*(B-n+IN z30d(u%|z5=D2X?S)*T0qi5;ii=w?EqY(jNlx&gr$;MO1>5w`3}7 zRt`#kJeFNjE*=a9?L^iaQhUjmM63;m z%p*0HQ!Yyz(u`?8qBNwrI8bzk=3vfXwMEZ<6s2$?EfyCx98xdg6gwpJXa#!37 zT5o)_Z*?DmL_6+RYgStDT*#>Waa%5ul4N{9F~bH%-r99}-3 zyZU(w=$!8$WmnuZMMp@H7TMOIVsXB`M>LK=caFd?$S5fxEV2Ehwu*O#LJt;L@d^e-FiXnI#nWc*FxCs%V_ zbtoT{VmJAK=I~PC8`s(WX~zOxh{GQKT=;LCben#BY$+s{#~k8*-Q2ssIU1j#y=vZ2 zb}G#af09De)oFPb(bCnrAj%5SzFYbnE3R36$8W(^s(wp?!=uaJSu1;cb#i*jPI?;7u&J1ZyW zchJVe#m%B?cUV4u3cV2^bk(5jspAu};8Bu}E*UdtLvjCZ)vlZkU>&!h!N(R`BT8NS zR;oQ{uT+Oj4_+4|hl)VeCk`xc-*+KzTBd$2XHwrQ7q4QjeCIk+U}7Dk>*X@r`OxV+ zCn$eU1XKyxAVGy~M$_2_X$+RONeiC4}W<$|&UlUr;VJ^ve5jFAM5VymO1t)N2x=x_3h zLy0$IT3GOpl}b`Pphn<43=CcK=BTW!26N1fD)RG(vI8ed^4j0^lR!Xga z6bZFj9&V;COu3Tzd|V+JL%(*M(W%761o0?*a0GP8w2SPONW2ehG>4tsDah+wn#m}#W5dy&&#qX{;TPXH^E6+(h zcq)q~zthQsp_UKUwzQIyu==Fq$ByB_N~=-0;%=#=czMMavzIs~hlOGq(%o7IiUQvN z@~pi+gms8a6&tJ|b{GoEQXO~wix=v!5V7Q(TZE5x1zcig7I8$CTu7CNv=^O{n6DZRagMCQ+3AUJ54W67&xFRoWy z_ZK&XgQht0$dytNk!yPhV`o0jld-cO5Anc_Ga@13UDUDWES2jgFwM@EW>`BCeUQYi z3?*KsbytR=zdSW)jaWAW0v|6m1b_FuS?m`Fm(ffj0ug#8K10t ziZePPGCJ$IoyXQPJrdvwYq2cZ8ngK2*G)0efnTazg*iO2Mi_@*mFL^u z?qVq56TK`bw(z?Ae0Z4!*DT2Jw>5lV(d3susM>2UKa)5l;%xRoNMDc|Xr?(J^mb$0 z`Z8XxQ=9t~#)f8>iui5oE+nkXG?rEkU+Bo%@Fd_&!G&0y>iSo2isN)fbWFx^Rk38w zEJY{BgVWl+P^anPo1HC8uxeijIF)v#(S?atQK296X5`-E^&2w5^mb|{Uayb92_;_l zc5#j^YZ}dP^lG{uaUD1=dYImswGyQC%+8-pE_`#bfrIglRufdc|9!x~%ltV5YWUvv_dP(bOv0|z5;&tia%}dra<2Ew)Veik`v3OlQy^-vyr>o^lmS%?{ z-CWT_n%EoCxq6mnW|;+&{~QGU&eKodeuyA}m-K^;-7U=TC2u%yXoyNLr?q5^{XxSm zp(&Loey7|B;eOnR@wGpK4_)U?^yRId)K3hpyzwFhN~*j{H05#+cazmjaITv}hPbS1 zsuDmT3r&@CxUdczXT!|1(${H6Qse7$*PUIv1|IfesI8d!-b5pgB`hhRxTpA+;@;n5 z5m5`R593>He}JB9P2L}sQn>oWD;4e;v_6*14EH6zNU_wU+9;kTcf!ltJrTJ{he5+3 zkV>fx7r;ST&%%dQKaK4C;^W815*<>aa$SQD%dKm9dH!)o&p3hjJ979B-@se_8*dyP 
z>*yMWY{6Umud9)c`PbPBlu8rF%gvV>ihy7U%qv$q&zpBoZ30 zS@q0ru(ft!aJk+UFd9wEe8n>BECl90Yqq+?Vl$bD=j%yL1{n;=tL$EM@bSyn^$`$= z1RxP)sZ<*lnbx$5Od86UsXMdVlE!ThCrZGy$khnq4`bGT9se6j9g;X{{Nq$;?}4B# zTR*M}G`%MnLw3?|ggi$Z+WoZCmBMsOQ~@ZV#F+%l=?R5fHQIhmbQGzZiQ67gJ1ui# zNd#3ZJ8y1kl4Y<^nl_nkOw(UzFy@E6VwwuBEYoTS<-@KEP299Um_@!XmjM)AoDXqi z@Z;dkSL;G&$~3YC240h@|4hm&dq*0YQ<@As!7Ln~D~@cCyoJO5L+kCjn0&dWyZcvc z7UR#Lpx*mUfApLT;;T((B}E%!#5+4Xfqqb~@^r-CURwi($dc_(th_RlIXP7;)?$8WJP9xk1W6;<-<7q@xkUVNM{OLtRM~*8ag8w_pwQsp~PzCC>jD zhR3nXDOLny#8Q{k&E3M zKek*S&gTZrH5=8By4~I1eZv1QIQW;r*TER6f!RK#T!9s%#*>Px`F2a%H`NGJ$(Ahw z$jH?FlSJvFzRa9}CKN0@d`_tvOK}C{uzbtewwmaORk!~YAQVR8vbUuqkJ~#ctL7!g z`oh@7>UcEco0Xltw|`Z9*&eT%35kIHb^M!4Uxg~s0)&4lFcFviKYzJ!CbZ{9k8-@RXN?_3qsHl^-DTD%X?ky&O>aIb5WMWUw3cQc2S} z(A?Rga2G%&%Z7}^kVj8nb@{pWBOf~Mk zY?p6yx!V79VVefvlMTOy*TV4_DyMnelaq|AR&8qC-^Cltw|#zND0w}8Uu$YS)jwX7 zR@vk5NLXVKlsq7QL_1?sR+Y z>K99cmzD|hAK4%z4jbYmz`?u=3Rx=ex;|MI((<@NaDH&2EDX=B+fBU7vDu=KkADbr zVKSaTHni&v!4kq@NwN~WCmNTzJgQ$UD9M*hUBA>b-mljxwl2_YWjEh!d%hUMeTO87 zGQs&u(fOZ--|@WzJa(<)Sbz1|j#e#!H=e2F9tS7rB9+5&cNn3p+gaB_-@oJbaIO+h zhIt@i`xWX7z&$Ct_e}_eH3=I#a#~Pv&Ul`0QAIzaVPa;)8?PX8bsg#>6idIOQT+y> zUFF3m^CXPlFmQ!U)S0W<+qWJV8&l1*_8(E{F}XHY8L-hH*&i<{ypWKQ$tee-P)M=f zSQ9V^NmEaL#VK}4M4f66v5UjvD1c%9`ZL0%O5V6XA``-pv1653gCVch4$*jYh0E1$ z8`8Lg*JX1^$WLWiGJ8a=?rZ+A_)&Jp;~l_x6ZQkN5`ji=#=R)*pMd^!c#7nlQlFMc z|4)NqU^OvOW}vOyVYd0HZuIbR+k7At9X_@MKpUCH?{OBEI46@;+HM}?3wE2d@Mq4~2Cdp;M$(Wr5 zrvLJErgU)vY4zm`KzcD$;FdC^VA-_}OS!#!kb^C*!rB8As)7DfBpjjtxtjPMrL-P?UBV5_h2eg zElqMTxl+zGsfCY9m(3NMj3go@%`+HMG~2NbS#i5S;PQ;_mAJA^Cc|=uibX?779w!> z8Li}d=>&fur#UVe`H&LEIE6Jrp}7Mnx4)t>dpa5jh9RKCP!{UUg?cNJm<}0WT3xP4 z3y%0SkkI`zv@F~6La?Hp}MVDbw=haeJ*thznLiXy7zb@~!y;w>JD z9paK}P5ko>IB|h}9A>ojbA!vYD9|A_zlxGi<@C}(tVg1_96MWXyNM~8L>Ag6Py%<6T+NBh$Av#6fDH^5m~l(_C3(!BWh$LlRU4d066 z_gwb7vWm2(6cH0DP$*5C%K#{h-ifSj(O7u%k3YhY+wSkxw)_wj_?T$9pK)6+t|4Oc zELT0I@+3H#OB?L)O(^m{h23j>T-WMo|BVPc`znjFdVCx zbAYWWU<_z#DkjDGDX7w3smNSAqAWq=B*Y$ITl|A2di;e@NX%-cL}g@U1a`8x{F%V{ 
zv^T!8rR(Kp)w3#De-R~qzS?l&;N=FgIP?zLOup@YA8z^7^X{DFP^T3A>3)&1$>E4X z;5a;Ixmg^4$ZYvT#Soj*>09@k zaEik6uaDR(l&Uf2wLQV#RFwX#_(imvXq2XR)hh4M+A8WOlA4qxYT0teer4DP_LT4| zNY4=H)tdivGn}V#zJy@T?^2AmRn&a3#sFi4=CY5V-ALH7kh&MbisxxY?InLWZe zQLJUaK~ilmT8f*QwA%T3h?ZyEcC1~v{pp&cv2jOXWs21E)_a5#<#+7;-=OZ{Z9gim z?2qB~4?ErG`rMmb$X9%2cgLtGZh0K5{5@kVR8+)(y8vLOCOG<&isc z>x_>=2>SZzhlYSfD<;(7sG(btgBJA@O5sJ)AIj5I-U9_8r4B@9DbouTTgt#KL3|4> zB2kf~yiQuq{Skn13T`(t@Vq?g$tH`+J1a}6_aIPwz^yKB;LGqGv$qBWwRLdT^VI4C z^n=R~f~1x*xN7{M;uZ*60^MALifOOuF=#X!jIDhgI^x%RC1VzedBOhvIDly$Wf>9( z>(+Bz@x%Mfc*E90|ka9xdxBTNH|@FgMVtwwaG9onbZ+DW0C>dk|68_@!t<27m6?j zjmh3wmzDV6kMq|T5i(xyXgk9azW+Yw>krm4AwF7hUUpTe{eS#@Yt#K7;=6>%6|2SN+=r4#%?1{z!gwT1~2OuIq&1M20?2oM?1h z%SN9=vVZ-$uUC0_`Q~7(7y+243_6YaN1-s>$mZjwV02ul_ZI(iuz>GQ2;P9CFbpDM z900x-O{6#21g%c);7N;J?N5`? z(MboPk$;AhoRF?!1a8*!{w*H$I)7laMHv~?=ikF0@e5kMP*&hGKR=1%@lq6t7`pL9 zPN4o!oCKymVYfRPtM(uB{>#XP+aY$d;ylW!g#`aGdszNJ2ytOBUu96Urk_AL>Kz3F z!-|~Yn1qUW{AWB|HmqOH5_nISX|2n(LzS`^^{Vz{cpqfJ|c_8Bovz|4Cd`yxQ8yk3wnpc9WXeAu-2DnI$Ql; ztDbP`6WgPAlZQYLcU2%s-C#cDm6wlUQ*079p09OPG8=Lu`lkUAf<~gfV>vf8pBU@@aE*0S^T98;U$gd#BnP$Sxi(PhEM`66U-!4D2YKfSbU z8uH(Zm!8>M9*O>^^6UyOhl@Ob@t`J?OtEU&9!<@rF>%fX%vJCQJl1`fv*73xl3mo# zrA+$c;IZ6b|4stf`osGjCXmj3rs7KHJb>eb=nBGMiBp6p{jeTXV=+y!Vhv#1)qu!k zc-746>nFArOWT&M=m%{(#x~%v&vJZx_A+;IwhU|Nl*zr%VIatU>o5>}z}=)y=s&x3 zgeb)4&v0V1h}6W4jPf=+qttA++cAK5(t{VVhZ`LUVf*l*OUC?iS0>E--)`)6+KS3M zE7Mc{1Q&bBAYen^{QPoH_-yR{G9P?_4oWIke1VYE_ke|Z6Tdy3gu+oO)zmPVE+TnH zp%B#_fC%$>ckFHh2?i-rEuzED*GXn&|( zwc@^mw}F5AG*#GZzt-ptzO^b4eAu(GTzB7Sx+9r*yX<%tx5f9zfmEPobW~2RF~9Cu z#6;eJpB<|B)xA~ca+RWLzS!1kw?84E>2$3!-|pEyS^3T3c;fUfmB8&MCskt~QTPuP z&N3S&MsZJHK?I4lxJ`6cprjP1=(ucff7w}~W1})Fk;d*kam%01<;pzT*2JNGWjh~5 zg2s9b@+thcUm}zQLu0kh`t^HxHaLShIh`e&^i=#9(X!pfEEWO8Msh_a^H3DCzJ=Uw zqBd&vAo>sxGyi%&kSiKL62=Q9iG`f!+2Q(tMhOSD$$V37%kzv{R68$d;cA9A)%^}Z zUnL|0jwsg%C9oA9uCy*T*i)-lbl`dxJP=K6r_~walvxr9hR2ZvV{p`l`UX-%Db+r3 zIK)KdiVj?xFKz~p@x)K&OD0~QNomiNOsG=RGucr!GJYQEy*k*sNn|iMN#hEmKQr&} 
zx$+Lf+k1dPHJG0unYaif%aPq0th!zGG+17!o%r?qko#pIs>*%{{Z4lnGjMET@~N^9 z&_u=Vb-6l@cyF_Qdv+^1ruS#_jsbZi3E6sz3(P0)AWA_=v{TMTU)TVWTpnxcVYo{5 z@aBq;EvdSAkLbJFP}fTp?QFr22q0w<9*D}#5gyOlrla~&doUB*9e||P zgLe{09xp^t@|andtk!M%L7_l-92!YkUE6t}o+0mx=ZMYJT|P zT-)+s*u`>RU*9G2o9)NDd~&5cv&IBYF33)lEOc_>6oQ%3v~X^dnd1cUMINhzgR9MG z(j8CBHW6z*y>5xWjME4#Bx`%v*Y)+?BB&s=b9on9P2;}^2qYfy+D{j$=LR`%4MYol zzKUgV@X-RwWqT~zUgS5Cu)%Tlt2CDFXa4giHN}@|^2&+(#iqEj-aca75mK|~8t4j9U9QM#>rq>>`g~Axk(cr*UtLg#1j<~MbG7_b#ZNywAB)^-qtSlxcjUqdcBvAG z?)z1>+YrYp!piX%dvleBi_b`5{A}lz*_%jy_x|DjTv2o$m5Pm=X7i-S!m&eZ39DAIPzbomx>7ZS?qe>OgbX+bI4P4$@14J*sZ$y-b>9t* z+2B?n-aJJCqnxT8vU%@%NECafqGJm-`i7}kDH)mX?fXtVgz-**HBJu5 zBLSUZk`ge~yCO;=CJG49(Dcujo}yoOrVo~r{-N7%Ka`mN+%?`sqLMDPqn2#Fze0tV z!&7P&8PRGr$-SIh0~1mJ>1w)2g8kx&;MMV;>9BsCj$rtU*N}S+fJ-lFgT4UlT9R@> zD*lGeRePD2hkH#Wf=3w8^DzIQ-VYwc-z5CMcd(}fV%sB(_1}5y%R6V-8lf_~g!_jG z`Xr;HEdB7f-NMi#p~s?mA+AdiW9mDu*Yc#^8B1lbaF}G;U$hEsxK4y*{R#{d;Ufe7 zdLlOYxk--tl&~%TipL^!*Se0*M6>1U6f4u%U#aL?E-+y&mtB>wZZ1tqdnXKULW^ue z+0pjaYM31r0!Fz}*RLfmFuTbw45sW+Y4pz`rNNHXSSbjBxZ}TGCC4ZE?eE;6?p;T`+ z?8kJxO0`ijoQ&6Pf!}-9ngraHdbYAG@+hQw+Q_f1i{&qZS`3a)+0&|E!7!S0G%A&4 zDO}cs0;2?4*~+fqXS$8yJxb{Bb1IKX4LOg+Afce(KID;1Y3dF&@&sc#ve)P|F7+sc z-Tk#S6Gx~*{3nu-Pv41CBM?lL1HW05fox#wCj>9Ff9C{LM7-gp2+6@f53Eo`cc%QP zK9N^$t(#EN(?_k7(y$Jb{q~ZltbohJcNCd>|Bob)(EA&73a9gg6T5n=i$Yg-_f|`p ztAI2`<8*ks^ z|Mh-oP6=AaBBE)hQfe{&`=*_rP^vkk|iz z_xNi&Ok4)P2eShQ=JxixCvU{MVF<%9YNc}Zs0*X8f!I?xER#oEs^5mg+e`TN zZGr??HKdrd6+R4)r!1k%1icK<2h2SUs4o!Wp`OZEM~ijQrbWUTN#$%<+kE}Y>akKm zEY^O-QWH--UTZM6yBZq}{K8HNc@EsX1tw4So0n%CVeQg4xuqOv4d%Z}4r_rBR{Tiq z?zafyLtnh3<1U>ofOTOf2BLNRD}FS0@yuHL&x%?L1MztHXtflnga7)JPHdoBT0CWq zXXmE}34I`Uu@oGCD-kXJfNJ{2B7!f8`A?;sr*=uj%mr z_~H}H!uI}OM*04cPj!?c->wJKl2Gicri9);7{xhXY!7^cS?yIJZN&%fTdowh0o?c0 z1v$L2DU|6;YQee(`#>sBN_*XCT4#&a{G9!#ZD)jy@>f%-jaQxuSni&D?b9GJ#kd`+A{!%2ayXgoe3^>z36zJ*!3 z;Uz0nY|WE<@8~S~CdxS9A+@Q>cv7&@ZZGPu8{${UvXd*=2eQW9MX|8XMx~%(ak5uWF%s-Q3hh@L(|1dcvq#NZg=Nm2h8R2G3N;K4( 
zxE{ZoskcjZiN?eX@JK|1*z0^&tpyqwOa=l#iDvyu*r@*gVvCE<(@cA5*EfR+)*C%a zvl&2bSY+*)`E9)3)Tg z3SI@ALXt~YWQ06kGUV%?T_Mj@cL61&W%C90eW2DVa>6YIRIfgn){DlIQv!VusvO7g zg|J@h6gfO#Z&_R~A?F_7T&%k>S*CrO+5G7f{mi|JiwxwB>>ncdF*Y3!e`=Nq2n@uC ztPUNvZ;Sc*hw85^u^D!SGSMZoSj4OviL+$)5-%|!LDA_DT#qh>*?5knlkq$;%jHR? z>6f5av}7DRYK5~LQOUrCx3{;?`%ka$wMh*I8HkN=KX-oNG%8D}vfl7#y-3@Ah$&HT zQ`dBUFDNW==aD7;bf*Li|K$ulN1bJKr!Qo}W4dPo#fc|l7c>>s_kgRM)Szl~32ZD= zIM_^{3i$UuvzBrl*JFUePeo0S2hL9Vrp0ctm(^>!_QX6|@9QUCc8^WuE_TyzN6P@| zwBg=#c5|g>Cdilm^7fZZ#R_dU&u8_Po$v$I1k|2q@JQuSL4?Me>v62XgKMEq*67#k;6Nnfb~a}ZY)+R_Yw3QTIM#ZhcA=80lnQ0K5Mi;47Q^*IkRuU@-%C#XkS}1%~BQ`2LHPX#>-B|i&1Om;myVgY|ba7 zZ>_h2CPA{Y%M)ShUozc*lyp>X(>%1RVC* z`ja0G`E0bRjw5j6b=HWk?Dp$T(tO+{ygQ>BT4O$(4a-}@v2e)%(1u3C70of>Fz0%a z(d2a=;C;{OFylx9o1yA=eb{Zl3uaz6rfQUa8wq zN@W~AD9e=6_oF1taD(V8%jOeWlkx5|7+_y<2EpS+eA>AyilC!%Q%?y%XA4zJ8cRn) zLJIsp-vT&QObnT~)g?E{-vdHkH@R4O*zsx4T}P7;JTHt8G%-gid}(gddN)|5{z z);fg=+-;d$FcrR=N>&l`lrYo$kl}re#|srjoh;L%3c?_LV`Dso1wXs}E$?N`D!D`5ZS*lhSUTgj8r;A;_!M9YI& zW>PH_5%7)CTspf+xSe}R5qQ;3khM2YQk>eRkL?im17g~kvH7>f;o!DkA|YSQ6f;Mp zr(PHc7DEj-2EQ2yW)f0I2|ZN5v33YDRQgYNr@kLU+HkU?hn;PgyHv7VzmR3z;x2d zZ5GT@(|LCR7PTUEan7m?!~^w_Fd9fc~BfPxqnYyU=0dE$ONs;)q7Bj7p9E5L?Hyw`Xg|xQ(RWpeR3KSnk!% zX5M*hO7l*Em-}(RCP1imejAD)4LNzcX6f%m=gj*T8@{=~`u!Oj5A)t1yl6MJcVNXW z)#zxQ#t-av_2msa1MJMkw_-MG25Y58!ANo8B+4-}=OMAZrei&v8}S?#ws=^{pj!f# zG>-XROs&Gv^&!j>PXU)sr{k48kXDEl+UL|a;5-$}#{Tsqh@NhZlpeZV|7r`ObwJu& zWG&Q@RQOZZ8Io`c^TCJWWnTWpw}`kfQ}65aP4IZl#xolVcPUQ4@7W4m_+bi;O82W3 zQ0`8Z{F1CB&hzr$82x5L=Z=|V>+c>jqMRvkotpT&tTv7>8ZARASvrrGVjd%IS%x|ZPV%!eC-W(0nFi!n&Co)_bQ=u|F0A3Rg z!TOVBm$>HPRU1EJXHFjyo*`WfSlr}9#>IK1qOv>=K7vb8DxTLhdncBCv7=Sw+D+ZK zW1EcwJuEaiSQJlI%0w~0NW>Rlv+N!fGX!C>Lt8=E(fPzDwDXxmM1<+`EzB7WaBJRR266u}S1k_KJ^5}$0N z8cf`-etDswg=zgDvNL? 
z)vYk5IeUpfN3w#rDLtNvH-KL>pzUm;S0Oz3v&Q?Fq^e&Xsw+-8zETD0$W+bQuyNzA zNMO?Dnc_=Zan zOBKQrV|w@XKkedc4AA)HcX<}}V^;RAFZSziELlx##Id{t?w-+!T9T{+lM=b10@h%R z(O;LxpZzY*6|py&qx_Ru;FTWWB;Ra2#t<)&@_*92?|Vj_xPT{Mf0wlCa*aAc{YDw~ z-2EpjUh#Nz4OpWK+#=d-J+a##hz?e4K;KnQxz9xgRkN+%mW7{h7Q(WE$Fn#Zu|DPn z>Zj*9!(TSB&zg zV=qb$Y$9l?LTT%}*9SCy=`(fg$I?aijmsMJ{i;J6NE7xN`yr~9yGzxA3+??2t%fkd zPnPI0&hrUE7!}J|{=v8a^~f7s9*r`ot?y$$Wh;!?T9@%FjEO)?)u;)%x%ao1as}EZ z4jZUWMe4P>W_pi~nq=ymdy#B*2hXeYEfzAz@+3k=zvvBJ9p@EISf?%_1gpL;Qg9Vi zwjH}H;@s)d7}_R;$^=huNOS!O%0e{915p$@|3f){U7Fbj?edvlgezfeY(;O zaes1EK0y2ItCo70MX;-@J*1J~lD?_-CDIyA2<1G zKZz$*tKyE#Gg{BFp+%B78OZgsPwDbzxXiP94}Oy6*HK8Up(eFzBeP5 z7Jv_j4X`9BwX@ax1((E*z}wF+{3#Tn8RQrgm=p+TsOI@W6MP>( zM*iZssKOjaa;r<@a}}d4cK>a&v%&cl&;I4_rKun z_RruVeL045bK&7=^Gx)5nXo2_|HG6NDrbpQpcbC+&(?^eoj$LhYjBiTLXRNB!J(Wv zTOPNv?x=v6h<(vrL9}tJ1Y*;%MuPvqPXEBH z1f10!v5aplw(v&~2k_*F;)+0GeA++Idy!`R5#R`f01y}yM#pM`6*aAza`+U`59Na4 zuoGG33@5L|f8~Q=K%l=Nyg+K7MWH*yZ)Vx;vhp_q+`wOKr14?B9EA1#!rXOzt-JlH z9Ypxg@6!1!3fx`f)wCZ6lRta;FeU;6>_WDqMXCcbg=u_e9K$j*+X4*u=rlT6bHCd= z$oz};&5as1^ns$)yl<5lBCGX+e^J?geh!>HTrV@eIdJX%Iq|;^T&SMJOwHDSg86?ANBii&dD%K9|BsV- z87H!@U9%QKmw`n_Mkc=06@q)xUtHD?Ktdd^umJ^Fa~Lo&XFb-7gg+A_|BUj!=)G(L zvkol)&)}%>o?PhI$z}++uKoHdPVZsPr3|d*peNB9=*7&xVlJO4oFBMEJ6rC6?&+L+Dn2Ov@MK;WqLno>AO6*((8N+PZRRgww{F2tEi})>^Tn0 zy13Nm=z0(X0da}w#FGO>YQ^*TjM~)&&nGW>yak1RvX|lkeL~j029bUNL@<2-FQ|)| zKjSr&!kMd(T*(dy^r+7%sHr6yKWa193c( zp*7n>p2`@ns+XvdBy0Z_TrN4m19ir_FA{0A{c2AipbZ)%O0{dfR}XBt%+PK6G{lwx z7#oP#s_a*#cs)15!wK2MFvx_MfG|qOI9bcp+D4~5zzJfH-vpqLcYu1@VPkNHXP}L~%Y>y`#%T?vFT=0e~`(SiTd`tiE?ii&?Co zrQ=dedMygrKsb}Ry<0(oZsYx}+s^RpE{FG*c8x-e-=6ydiMI@9M6G@5Ku*;w_ltv; zueD7c%DaF6N7S?z0R!LueRu>CMfA}@SV=TtfKGF)K0`}zA9C0-s#LQs~!`^Q#A~x3(x~ zXt5@}Q3I@PcQ|(C&!BW}YcbNMGbigO!@`d|H@l6n8qMZIDY69@k9#v!>;|8SCP}mk zb*4&BG8@f-tDr>0ZB5J)wYY;lu2L~e;Cz+~v0SQ#G4DrQ;bu9>J_&kv`)xMTa zgmiE`S^IhNiH#+y6~>aHGsfMaC{QbTPM9tro)BbK(yHI;p_TH+_5;+;yHL`0AJAn~ zTfK9P9N3Y&y6pVCLtEdWf=L{*40GAm;C*klqqQj)95yEV5oxlv)@D}jCEieCnQjdF 
z4jhV+P1Ilzcq+?CyfAmQCEF~4NsBV<;eP53UQsXU;mCL0pg?WJgD-80nYMHE^Xqni zl=atC>NY=<6>t}~cq!IomLo5p@_gFU?EM93F6o?h$JusAAg$i`%yJ2B2)!t_M*b|; zbudwC|6ZNu#PzfXcm4f zR6W_siRuSj7Su`8G}2;_XA*X3{Y0iI-ver`;~7TFyuFW4PjTye;~90-^}V)f`cgQH zXWxl^Xvw{A&5gbGHlgb|K&=IAfEgq9dTL+cL>(UPT(`?MJAP9##xrTDrWEqH9Cff% z8hj>&zAw?KO8`1Z3^Gsx65lTm_cyWw3Cw208G=%|9|5~sQIU(>D^Z|NuBfvj-A^ng zP5<%lleKDOof{1nnVh!Z0k!J&;Yi&=EK;1F14VVg;HqvtyB z1rXrjf1LUw5sR8k$TJ1OS#wsX8rN%@?H{tXUS(rH15cI2r}KZ30U8tJR#jHwFa=%> ziq1jxtF$TKP`j4tw+yCG$8~74BEJo;7bm)do5h;5U(dm~Nf~clO}TJv`9{Jo$Jo+? zrKaAwC{vapxL#1I;g8H%(hEs%pTWfemOcpHG}3=dADvIrroqn2aYd##%k{}RhjUYf zy7}=+H?1ELA3V7Gs;n&mGYFF)7;D@rYgYolh|{nojFY?p$w&4uAOv?P-J`liiWx`q zO#h`3)Tz==xx8HDuDz4Ze2_4oQ{uv5I`ABJwO03ZV=$c&>%G&N?H#a4g1U{^wsR=q zW37u9(w%GLfSGq{f>SJg0)bvJ$rkWh6xAJOX9Xh;D_!=|{H4S*fD`qAfsjr)83#91 z2kj1M0D01hHq4LrIeK?2KT6^DT*HL7vjgDHiI?GEPvexjjI>1j5cUx8umQ=n-*Nc3 zlPYJC1~(iwECtqydsi>nlc+k}49N*+0M=}uo`U6~ayrlqG23OU>98MAcn=zBCIUGA z>rNsQ7X^eW+Bezj7vYRA)m?KZTT^X_PQauG_;uNWmLD#ip}Pd^ z{Y7sBXyD;sM~;ug9!CyW`Y9Sz#XNgCKSS!e(0_)38^ueME&_%rI}5+tpnH=UVqk~Z zyNSuEpeX?j6Z0O&o<+1NlcLtD^n$X8% zX$`e5AQ(UQqlyh%aud@__VBzP?0Q3C@82p$Xs7@Q@o(>d$KMq@vQ-Wyv`|i%_9ezu zHA;szIgZSJ6U&a=ZxFrUQfTS?tayZYdxN_s?lX-GMW=hCTb&LYNfqw=ew>s3o2|{H z1c*hiv=4_{y_Lqi#&{{=)iRt zfx+^osHR`95{=inH}*^7+eNhPnwg?uPX|T*adykLZ^()mMyJ({Tz;oS_c~XSr{%Uo zxT=7v`$->MT-HM2J(S8=)_RhiHYS|X`5AR!>?V)R@|WYug1b0w^Y>^~ABgQ=74*|y z&^KH$YdBU}PCtDQQ{$2Yd=l12LTjgdalm}(v}Fk2Zm&1@__egI0_VWoyTlfZB}Qhc z=N#)%{K2|;_+b@urZ}dioD^~RG=#MthKaNMqJ7?Sd~3uH*&b*bGhV}mN#tCWdv+NY z8kI&A(7}mUS)yex80ZNl@kK!Ntf;3Vi)r6ONx%$f|HO>s{6_N&pgBY z0mLbt^u&&jZx54&Sz|)Q$+gly^&9n41N`eg%qjjY7ngiN3ecNLXN3#_$-w6%3}xGm z1XJvqE>WgYP;)yf&bp~W#+P#?=7VWglMaUZFEl!}Nt(Uxw&|z_$!()Udm5Fx)rR$i zB-D7*$7To;OqA4GJzSl;Nn8tpL|~A&43#wv?Ub>=uaHE6L!D>Euw64l-`3Wm-;$C4 zz$!sUIq=Y=g7*z?M=k%^{-x%z-K((Fj2r|dfJ1(v9u_)}gCM$@Bjg3_UFbYuSdOOMu;()7EO5MduqdZZt7I!=ti{w^i}*w?rZ4 zGLR-uQ#Y?Z3=;CZiJNm?yx+l1p4dxd)Dfj}nk%Ki*3q1BQkh7nRmi6DC;qo`B2|V7 
zo5DVLX%tNic3Eag=T9Y0O3F1^skJ_5s7iPiE3FB_;w|#mb}C11(y6mG&$e!mp)oH< z4hrvUyZ_9I*%$v!DV<@q){Xj{JtjW&XdKv)KZ5}XGr5DoEipIq7iy^EnWFjTL5 zBKQl#azvn9TMa7yZ>Wqq1Er_L_K8!?@g=c#cV-hakQw|Ut>syKg zg?nloo$MV2l|63z`Y2jYmk1}v+8YC3k6eL7rws)$giOjYDDR1u!kXSXb z-M6PHUPG%5rViduIk8^NP3LTP$JE@8BAFSP+02^@E^VzcIV65tigF8 zJs2BSIHb#X?JZo-f@~IPFBS(Z(%8PSdcr^l<@Ln*TEg{XwmQ&nFV|WMj>5mS1fYR9O!(M1&v5 zge*r6wms^TOP*t*v)IdsifZMV?`v<+chi2qP|gdDKaVO7Y(D89xPd84KIx;&;VkN< z-u}H$@eb7)Nhw2MxqSM4vC6yzhc8Z$D=q{6GFD>oFw4I$Ri8_&c9Kn?-`YsPY6}M! z+>2~2in)@SpYS=N_4CVQ9)_Jrle z#peAI`R}Ek=~wn4f(CN&rzk@juW4KL*)w1#BM;j$p~pk+{F0O9N+T5}boB4OrHQ$| z^Q%PY<2F$K<;45!jHORVEu?G!C@M_*(tS0b9?}6m+f~ajV z^h_Sv1roll`tN$I6hsRLoHHf(nX<8M8sLHbQ6L;4;&@@|g=?j=K}cxpHzNw+F@Cm7 zSs(@kw1SLKK3%NWbTNADi$VKEZaAam?0cLRRjfz0_YW%5x%vwBtVfZTd>?}0x5!R_ zHy#V7@c6_rMa`xEEcMbp{E{>Hb6|LIy|LK)9&Xxzqq9p)EHb>uj<4NoN3BVzc*hg6 zKTWPy@Its2Uyj^3N8VfBkMu?%|4~QxgEoTbhA$L=#v}d9W-dp6GkY<8v*v;(fc{;+lRtY@Tnw5#oT3Okym^gOIdtM zZI-^VGa=9~<0PelbZ_??G0`ymXuEn*1jyg$`ZG??`ppu zhY{S1AFbiMS$QnvbP4it8)i^Yn3QB3+IiMaLDN65ajMFnW71c)J!qXpos#(}-f7aMaUZhU zRE5mg%oT(TPI)@hOGl>NyjT^2ZUrbTe7wB%#5sbP znW+IhzGfaPi2EHytF{L(HCkIRk8Mnt@%<3mi!5lM&-{Yon}U6}EadbEcJH`P51cvT zao$;+_9!}B>e-kkr_}EquVJ5HVrr$Zt#V6qU{L{1d@irx*;<1Vs?djh$FvVZ~g?lekh`mmYxe=R27yyPWN42@|_8~+8&|p zQznG)K)tu4-50nR&UN7ZsHsRUKEpd9t)oQ$RVOHDQ@q@+6rD zxuVk~j!gD>_!oEHmQ(nR2 zR=)>csVU{7Skm@10zNP9J&FvxO=23p86nkum}MaI7MLqUof{{zdL1yalM)Dj`X(bK z3_eld-_7seI4>srl3dWy@Is0z^$#D$=SUZc78I71KwIHU9-rTe^AWkFcA+?MAbn*6X}3+raDU7U-(b~dU{(3hGzQz zwp9bzZlXx2gvzNtXpi?#3G+AH>cb6KppG|+d;j}X|At!uA8xyS=U>8yKLz~L$=f|C z4Tm(MJpYec83KryR|EW?==%Tpy`TVqyQyWQxBqTC|Ni-s1Tc<*TeGL#{V+~|3QjfIG!-#R=2&s_|E@eVjo;!oDMYimp1ahvp+Hil+X(v z=hM#rjs1mA(*fg3JoSI5jejPvH1q^;U++{L{?8c!$4bC>8U*sku<_3v+EW4C+vRk^ zBWL?jQ2iL~-!(X#tuSTUgjzP_g>MOr=)q?Ex`hZnjIyM+f|>Ep9>- znI1&+-j9O9KOHi>vJ{d>Pxh+(*O!Bu=6_9`vG))o_B_WqdJp%Dw}RB_riE@(i7YYe z1BJu&sMfuzLPeRVa$bkmKU68eDFlax#xZG$Q$sCbCkVYZ5OyQ<)^fL4Z^!B1t>tHW zzkzXF`$2zMZNpgB_K?tgwPMKYuokRS4wSoJkeWULLeE!qy<_6$QiLXM^-3WBOWfd! 
z#jb)>%PPcP>FgZO_e-Goy^ejRZu^L&7z$mfmCr~-bMwuRf}0TrDR zD6w|`_rMdf)*bd3uiugpM-_xA-!)NuGXCz~?6OqP;doS1n)?taa-RYaE)E}u|0;BW zgc^$-BqXE(K(G1{2iLJ_5@=%`Xw03$ZASsvbrXODHQA|B%z+F+_cZ{QQ7tq!+n!q& z9kZ>-mNXka5Zw8VV`Q!6v=lm3VPZkA-dqn7b{iZS1_Z#e_c!QkKqFPIb&ldrk7{?! zCzy7-jlGV2n76 zUZZJBRA}gz(HmzRgWnB~`Pk22V-#`_lRA22(E+0X4krg2yT|y;fzMhp@e0 zTB2j^2qYFB@M9CYM=0^(`4fRl>E=5mUM%vvL;;lO?d6ArC6FuiD|d|h(Lq|Ty}4U z12y3FMQYhl?XK|Y<}jcmZ|$Xk>`az0(kW+r)C|n%LQv#_KwLw=aIDIccyv zU*Xa_`O(^TvP^@OQ@PhMMkE_0ASkF9sR!jWHIf*Tk#NKfvgcH#oH+52?=!Dx`bu)T z?jUZP&2dI7tM|bJL7&try|id*@ZtV;C;)qbD+#=OBel~^6Qo3IGWq^i*3z<|FG!M% zRe_x?U)Op1E!~^mOT` z;EvHj66x6SYA?D@IDvuyO*+7A0@8V~Kp7-Lm1^1fB`a7EU5Ma{Y5A`Hp?tjEI9AVK zI;b0Q@GT--FDjSM<@?7lF{EGD0~##+nkD)*{T2I-6*36&%IJJ=t1a26G>N!vnNBw! z%|h3Q5)F`2^rG;bLN$T5-MsYbOy2}hd!|~QgphN0afxDbZlsaw(tz+e`3|wq5jq$* z=~UGD;!9BNno*+?U&u^_NooQkb3*&^=>Rk#u|rj6ei2b%e(~;Dd(+V~s*Jh*r;X|e zyuC@DT0ay#6$I6L+BRvzpB=t`OmZ521xlrnZsRez3sI206HJx%H5-MDj1Z}yJDsiP zuzC=5fuz+vs)%=U=`SP6JZN!_B;bJ7--pz+po|ON?nUM<#SP?i&0TNqRG?uLeQXqp zbZQ^c@XW76c$??@CMzU)W#B2>R?joEeWWW3CC3>5W07dXK>*(3o_fQ?r0{8*9X1$K zfsa+c*-`?%Uo#siR*8w&r@q6!S^Q3{V0r*B2^74gI8E-zeE@P-0x5QM;E=f8*(!P} zfY!q)Hxv{U-&nWRmVEH0jngvTNa;H2%AcW$QRd+aG0M2q!2?pRBfoav=nCDDqwB8A zNE!#o91ial(^sz|v^3-Hu$S@PH{;v8(3NXjpBmGd$~-uu)7i4IT)BxIc7UUK$1N>W z4hv|YL-kSt&_S^) z^DXbcRIy;4*i%HGtPjxvG&{RZ(sS;|t||PvlfC`y8=Bb^YVIMnFnAde6v7aH)}iS` zpQOX#BvusCkxtqA6M*3ltIPr?eYzQ7l?s5__7j6NlOe| zgU|ISZ61=9@C$Q`EUjdB9J})-p4UZS1zi*|B+(d%(A?I?WVtT2{lL4yo&6Y^uu|f; z->i}i^L(>>*T10c3wsIv4)D>aYEvx41ujD+6aJ@TK zCXYi0Z21tRJ;vbBIl4=qxvsYb3i zKVHL=8x2-VwK=|CZ6{V8e2=6=!G>axeT7k>-{a=5y5Sv6DxEh?RQQ+xtgz$4^usc6 zA>=-b+H=Rf@IPXO^07b!-zO9qGk@pjbL{5Z<6%qOM*yfG!0=>1T*CpihmiTjcifH*oeAhpe; zHYTc%hBm$+2!-w7>bsu(ljs7GtxFl8;uUy3^s#(Upj^9YK-+H{^OMjJ`W+D8#~+$L z3~EKWq-^}4%|gwJ?25akTRuVBJ9G-s@eYO!dX&j}Gy)c$E*tB!0T##}WiJlTm4Fr{ zQl_;0Qf?yx-D_}`%K3veAA@c|d9~f5C|s3!%;f@c0<{u&63VqvsFdFLqbp>QcV`9D zY9#yfiD9BGyVqS}rSl=F)vv6t0zm2PT4Z3rOI938g7>A|{^pFHUSEJoN_2;8Q5;k7 
zOTce7$(*w8V5Z<)_Pl}qFxEE8VId9~#X2_ZLY!Ih9&dokyk49YU#r`0BT8((IVYm7 z7mMsML;IbO&Zcv!R5Dmjf+aaaAn+$Syr)T3EQ0dQu+4~rvj#+y^TK_-+}pC zxO)F66hzvNj1ykTOqRlRDUW-aR~P)`Y>7yVC@u_cZzo|;-rM^h`lrfD>FPByB~p!l zrwZA;JRP8(`M3c8H8ufdsuGaN*fzl=x_T&6Hn4X#k}!(iB^|!w1}%S6uQFFGE||Rp zXxMJ8MH>XxpIwVlH4i^xnZ9D_yL9j+YImY#((kU9chzg!GsQgnYSOX?*Q>%xBaDkF z@JsfnF}uiX_rk{7Y=(L)|1n_=MX0shFwbhDs3tH*e{Xyp5WvViKHRTAmGEp1KnahI zc+|%nLY%f#T-LLF>ZTu)K-5SCiwqdow>~c99)uHLHZ*gXCRMCGC+4D-oiL*GSdExk zgGFchr6sw*hR07O?Zrgq^@dV*zx*|dDK%VDM%Tmh-eCN@U)DY6i?h=>Jv7Wcq5iCs zNn~bWaq7X>i~Ta^&U1=pl(5RwKC&<2G3Fu#)*amtI#=!_`=G;p?KYuZd6pdB?e9j6 z8tl{S=}IGZ^C+pxtO!`TkxP> zbdjN7{rCxQ;44 f|4LVzHSSUPmbVIMIS=W80ls7$cnFrp#?vLGODvOqwCGqjDPY(PMW>{rj;j#1ps zj)AZJxN!;U>g#B5>u=;cL9%6j)SG??!F{Nr>MvP}hpvH*?Ps&@wNNmdcP~Vjg|jiEz{lI0_HDl+74>J3N_H=^Nc@a!uOn7m2R5Hp&~C|Z72#epKH#ix z7MQ)vythLHGCqY+dzqyY5}w-;GEHm3t=n5)pUXfp(lOWA1>&x2M)8W82toxoj8Yd+ z42qAD%2$rgPx)T~Mi`xfaP3IFB|=`daqH-vXrl#Ce;AB88>o9{i$@80bOLRRK{ z{_tUG2@Q?V0nr*bm4`dI{o!-)-;G}pJ&#&t3`v#@1z;>7>s2{vH* z=WS|y+&><%Gv&lrmXOBfx3tm6Wul^?qQU2a#>K_uu+cMMlNAv9t2poWF zzi0kckc0YX&wt~@pKSi)E|AY$&>Yl%j~N%Vmgi9k2nY{|r~t3L6X;PYgqyqqVvm$d zF5%TGFHg}IVHZU3x6K|w-vaQ-z`j9G@>XO*ia;EQpvJDgb>$DT6JR6cad|bcm?wft zu3C_QwDMas&}uw>X2w``rgtn8%giD|;2{7-Xvl>G2S*XG8oyUF8OEf5iU9fNQig0$ zWxA6DBjow-wZQAe$G}cn4U2#a`k#v&6k3KH+T!@0*{~?1=fsg@Lo3;M#pXUBWlE(oq18(Hw;L-R0k`d|n;>9-| z^bOY<{KN|;Nlkbs~w#(T0A^ZD5!IhfNll008Cp;>~cV*{77W+C@3IUCt*29er zN^0uthEz_TI$^N-H=yoCNQj6;h=r?9`ISaVN~=@4xi$Z z9rBoonHhtQZdR&Gv^b zLH67ofHuRLJ4)m4Q&3;O#t08^Q!1sW3Nw%3k;MPXhrSIO>tWg1Rlb{(X-)CZAburU zk7tK_X}lBzOI8-}*Ai+GaF{qE$w(7m4#XTut&b2zL2_X1e-WB2%pCA%pi}6wu3iio zN2V?mx;M5V5FWv6FKzOeLHq*Lhki6rfJc&OeJdH`9N_>B@zWQ0K!<-0 z*eVTD+~D*=+&P%br1@ZGS>|``Z{aY2ROcBRfy+@NE zoJ^ywM)zZLN6YQ<61xRd7)Tfhw(NvnM~R{$WvV~?uQKF85>icUHVD(nrIHlJBxKH2 zJs3qHep=3f8^I+-0e1Lbn&q7=!NaecS}PDtFqfrkMJS5kTR?E769239zcjZh(5pBZ zW7(p!Wqg`SsxM1-FhnN6zz_ZK@1g&tj7*4D(L~xdvYTR9!C{~2;~G^H4?YWG+3(c3 
zu7ffdO+yli#Uj*~v5%&426KBn!VI-NKxbxVem1fCH&F{qFip+C1%>P^i`0_Zote4uE?7qZ$gBE`s1yMmK7u*BouoquyD_4bz!j0XioJ4i9OXzR3 z3uZp_gmS!*s8#HfBd&3JU7ckR+(!y}s|MY}$(@ zDrjs@G%u`H9kU7ab`)A|Zlp3*gg3`4!TbCBA7(0%CY!D?n7Wf@iYfhN-EYsY1vmJ7 zOXea>eh`veppts^r8$8)Uu^#{On3WsflS^hIQn}2cq;m3pjRs;TbAp;~_!Q?df~ z_Cj|sXG|b~c-UL)-m>soU)c^FXi5!KUDF8tohQI`b}4_@eTweLStU z3_bnHr9!(o4X4}I+u@O&ouT-R)6?)%$D@ujJpPCY60zFTR#eNIL;q5>x*XSPja(Cj z4>+s{$4fP$mPWIeTO{H)IoCfW&t9rOqzIG<*QVNw zJ!&ta!YAR&>p^6+r_m0(C94{qXoHcFHS*_|m+L*8Fs8nTKWC+STu><~sSnss*t;Xi z!E2tcBkJBR?N{?&pwGt+MJyp5TzzpPCsgVU0kOq%xJ*np@8C_UzlDDH_6AjMG{d9P zYQx8;8|Bbblmc}2Xl+!FwO7b&_43Ct0T@lFz8wNW-)@VH#W&$`QGFy33g=&8yWL!= zf7E^MrGy;NhFwX;<3sj>9*fKt`h*e|Bs~@IG+w#mgtw^JX-(ZfFC}43Kuf-H07E2ZSgWq3gKOoY{aoy`fV;Xab znm=VSQzErw5|+Vc4z%ud2gk$?<2_s2Rlgn;4zV|P}QaISOiCr1E7OACj!=avu} ztsR_XD%W@YAtlzuE12u{dk3u9QpdKBR^RFzP4brCd=yAq1(EnzP(7hJ--$YRi0J|qM3PrdZ)&2Fm&x4V+06ZW zlJxs@=ew*47%>#ej<)vKx-@Fl60|fYz^DC3oD1-~kfAo=Rz=HmMjHH{38zBt%;My-YYYZ~44>=MwR+U|dn45@k-!QlZ8tEo(^BS)!MKkeu$vuD81~XV zZdj)a6qagcn#|deD3l>j!_l?Plm9Zv9N&3^(V2^3Aptls>4_)ga+h4h$0iV<1Ox?j zyLv~ArYkgc?n)phY!I3rr?3B}dgVy}OVe;H)jVxx(We=+{vt7?3gga7tHL2FbSVq@;v92cqo?!UiHfr7Y zYQB=x+WaVv&W*&oggAm37lgho#s30rJ(WsN6W{h0Dv&PLUsx67_Ly&hMA1K^HN^D>y6HfT!anz40`>&pwa zpW0H;8BLN%20xWW2=P8kdR`rXf52gUSGrWw!=H-t10vlmi232Be%2*h%hQd`x{8Cx z1Lsgm5xJh>h{TYX@oT2OANOo94CbLy>$^&vSQ18RNAFxhcHnrP{FsvB z2)Y%96`OT%IBsZQk!k|UygK$Bs^j7~J$+rb)VMK!KM%iyo?K91VC73x?pzfy&!i7$ zArZ^NUb<(u?gVbDfmxcT5&}u+>z!^19ChyE=<(9|<`CxwWg0^8LA>^M0 z1TS10)HyQ7HiH8ZL;5UZI4TyJoTCS_>P&j*_MZ;f><>5Eqe*r^#ThRjISjgKF6FJC z9oMqrd~gpC871g$>N&1Hs}$9&WHyospi-%S#j+G)c6d%&lrHQ|sSp*xUQ7o?kc}T!44AY`aw`&%QR4Ws|NVA!11R5K` z2KiX6mS&E9edWhRH~Tw)G1hoJY;VZpxX-TS%M<-@3e(Ia=h~k(%Lxb=K%_wnn@_P= z@r~=vld;dDm#sx{caZY78tm zV}dBMkzP!W10PaDda&me^v{x%wVDV6g{-ESwZkElOTGmL4hjG^5|k(o z=Ek`uGp6h5o*&l_UWxpY5AVtGWy$4BjDk!=p2N)5tSwX73Zyd;zQmAy-k|BpT&SG! 
zcs*8vQV8vwPP8TDzy5L*v(5JjrkxRH{w~wDLM)CV)~tmoXo15{x{L_#<>A9e_eE7X zpMs=3BF|DOt?E);#bV-bx9p-hR}&DrGEy9wV~(fVWg4%V1%>%?Qx$xH5eQ0)J2U5y#+v>R>Oc# z@Oa}pgq$TA@+TbJb~iE`MgbZ|x$s<7`15`yxbos2xkT0btFS&1Ptm!J3G-{8qMbMl z)sdIv$t@F0hmEV=j!VL?cxOxH?O#uhRN^!aT{E79rEv>DvZ70D^I!#E9;wfbONpVA z*-XP!WJUHYaJW5&A(h15u;EneF3mb!1?G>-bgItCHGDmZPoR0x6@zbHD^@5D`+mVW z2IPAVmYw@O5w5Zr!GqKuKya&PM&QN%w?H+&m^>SnN{N-M{^oFK%oSp1c zI~MiRsqq%C`Ccqmnj-V5go^+@K>#kE0zP7oPeV@VUWyML{ki^tx8#mEj*->~7{}=-Kn}ca=;9Mx}~WA>wW{$Xs|jn=Cj)BaOsKz)(mCR?W|} z6C0{m>iBBdPm-Gnj~CY#nxSR|qfm5QEmV(0k@?HQ`vIsT0+W{?)(Yi z0r;YR@{mQ+t{LvGMp2o!`i%*BRRVnNT<*iZ#SNt{=Ap!#8F=8i-O0v7A92|9I=`FF zl!c~y-2FhBxDK!jDOS?Qh+pj*8mOe3objX^3g zYOhQT@%lRQJ|=h8>8q{c%IEbMOe)P|)T>(Vl$ta+-`B9c2BSu9gFjVgTrM!)NBTXf z4%5x`RE;_Qu}$}0O7?nrwc757M})fV!)Z^3r~~j!Y?&CR4jJpY>5rU;!E$-g8;&Ni zCPYRpOVKY%MxQNI>3>OPjq^NRG4v@QMi@+Gj}JT>+P8mt8Y(9?Ei1hsbGq!WVH)Ao zw#FpVaGjRTPoo&J+3aqQ(~vK8LydEbMYAyFOwid{=k{O6n>sydu4ignv)6!-FISv_ z)Nx-=NsWtuqvo5YWN#j}(qdzB9dV}Dq@S;Lp|Y14qm7EZ(LN7R<#3M!)ar*NK+p6< z?I?~(Byz1R@2iaFUvr5@J=_J7PBkVaEgPQ1YvI>7oaK+Ce9rVN9$`yX)#@@koGuMk zth&kf8*81Yr&iy2w-{GwD2RyLN@|)^#EY~*!=(>a#jr;9O(O%OF#6BdDvR>!Mt; zzm`l3Td*dfdZE>W=abb|&t%7yxh>%Ef27!SsvaViObVK$dz;Vady%*57pg`$WJYf* zVSl)4s94P6v`-Tx*1PC>TId@jYXPW9bhwUG-5?MS{}j2ncdbcEJQ0Y6ZDhLZkw@_= zo&T|yz4-P*Bi((GkZSSXJ*z=nWwr|EGaFC5?ZL_+(R6|42nEii%W3@i#=H#k&MG@^ z9v<7!)h$)-N86m^pX(@RmWwyU&8e`I3RUhNd3i=MWQ(qFIJ#1^!5W9{Zb%6X!?aJ{ z32OKd3w-Qp+by@qESXhXXDzNMDFA& z0p^2Mi7QH61bX5_N~LtG`L*~S(W9S6%$iGt!EWH@tBkYZBi+}~NfVQ~c@iK+G`i-h zY6gm8$;DCTOD}{qUn343rQ^9O6K3ZGc!mT+0B>TI&%@eoM|sp+P{PWGQjYEqrQGU` zgU_3&Qn=@2XLg&RdAPZOmY&=E?)@<4bUSkS%@H0{?VCTy~a@>;3Xp1 zXaF<2qn;<8ltARkOlSLC!vBZAFv1?(&cf()wLDuS68~;DnGuanFQE~(PH#ZsP<=L( ztWqD9on5)X$+!yrkdRGe?x94f{G;8&{IH6a&dn7yja@gI+r>e*o_=cO9^Y)d79fVcU(V8;B-KU(%wU%9qKwyvlMx>*0lQ!Gqcj*#aXdR~#4R zrQzs&5FH&vL`P-V#m3A5Uxua9{FdU5_mPbW=wZca(#VIh3&cnUYV~xQczRV*(bPOJDsSu?9fV?RJQ>0LJG47jQjtu&<7>jN04 z8xtPJ`{RrY+JwNA`8>>HCv<^36koqx58Q$8Rh@PcTTY?|IZ}mkNn9EVw57I6vh)&k 
zd2=SBH!)Qyb(x>Oo0sY*RZ-mjY4cjRgSnJ1YR+z2%$2pOY!0ivq~D!1TZWL%6pJ0J z3!SQA;mPOd=?nNRzId`{A5$f9i$x)ucfL7RO{Q6q#9GQ$I&Fg-?0htOZf7X9QenRL zWU}PZRB6P&cbf=Nj|PBf1T$1`-QJA()4!gKi6|AuO>75huC^7XjqO9V8h*l;WUAHf z^3Qh-kBxN5#_!6-dc zDZ4rn@+w0s3bB8C(1GoZ5aH?4swUuI&u+1&xbfRoUCe{4jaX;@uhV?{D_D{I!{?6+9-F&OWcFS6~SJh*>IxE!)%IqYHs#aMJN~Sc|OA{t>ZX6mi(nbFqc* zZ*RUURJSOKd_87d;>@aPzvxaZu!_2#0yKME2Bzw*39ed8AnJivbfT=JLJ%MXwM@V#6vyZdoH=Xk-$K4fR_V9Tun(X&+S%A^FWxqDk#EQG8Q&TG|Z!9istgR{H!*>}c?D-fm=OT|93L~wQ1M+5R!lVC& zkbWifa6pkmCMK+{LW8}9iUDHz{tLQ`enOWAV!q~zpL~rJBlVmAB{e?<2NANsYg(x! zm^eO`gJ^dL8ddZpduTwpB^Pi#J+7H*S`=AE#&;cyRm>qBuPU;5k26*G2KY7_v!|S z4SSfywCIe~IUM32Mm04J5!$Jcs?_sSYef*neyeg4FiZO-bbqK9CIL@)f$o!XGI&)C z50AIb+0~7(xE>?|VH*$S!6C<#nOV0RyKyl}f zwEcT$NTM3T^Cpjntho%JuJ z=8+`mV9jDZM%-{ZaRN)iQrh!aOhQaTl5DKfr!no-yYgtod3icK&B+(nFy~)_Zg%;+ zz^e1qJTwa1G%DqTPrpCn5s^gLw3uAa!)`vCwhb zcRp_D>LXZjfofz4R&V9$lkpjj-GQ0TnK$tIr~-^J7Eyc)d?EibDs9j~QBjq)2i2s( z3z)O;`QUAKz&3SshcEQPLK#y7GUO$j^$4T-r)yU_uCuG`-AM*=)wzx8%1*z0LhW7> z5l_gmsWT&HC1%ZqAq%Sdj9XlZ$lVAs-y&xwR=bkoEn zJx@dj^)F$!vb}LmFn(q$|zKkg=Ygi!^)8x#5tcwd}#W`^ZrC|bvWgUobw`Z zt-G-Sd7n8;9i^q`@$Q~pE%co^8$53d~eBaU~IjfivzkL{dDSqo2~ zRqO~N;a*(klNG9JiVf0u#Uu=r@J05uFy`b^X<&6po3wu^c|##Mhx-#8k+w(EI|sLx zR0K~ExO@b_QOPz6iB$bdl)hiCDpM`u38?p5IrhFSpbpeCcy~LclOya3 z{DrA{rwH)u`80_+Y3;`bBW1E4eBV$O>G%lt03zDng?%4P!13KyNGzVpHy9Q1GQI%< zBDFr79fb&pK%0R1x?Ohm^wcR`9W~nSFP7$?2LV_nFM3btqk!<><3Hz?dC`uZ zp0^&2Z{UzW0k*Bw*&#YNfo=JSC_Pr-5r56@PPAVlu&vJ_5Ix=XFWTi*`dz(pntDk6lAox{t z$(k`cLqVYk-ryQ8LccaRK9NjhakElVc2&Rxleub^&~>wU+ze9<D4x~xE2Haf z)K1E|Eg`TqpP>j}skC0h@+snN()k#O;7_7?KH|cmp!*ml-W?xAs|iJ)Q`f}Ju&F97 zW&G73o-q83*WAW4JmatjaTYWLD-_UYizVx&FL(foGc$ki6mj5e^;tmJziUdWYX3`t zxl5jYO$o3mzlr)$gWO0WYKIK4%6Y=CYmV&C&1%W1XShHybbyoQJ=nuBLU}JK62R~F zb513C;j-2&7=1Hy)R2_WKMV!a#v^6q;70SW_?Hem;NuUP{S#yCwMK2%!nhJldrog{I62#<#{~^(l2%f}E2RQ@aHc^Upb@22Z5fffi@b?9p zZz*EH0H8*WgGdug5doQg|E>0JCBAnNdvB+hJwQ2h;4& za&0c;F@=-!s&PNTDrSGLF23oxISj@kCElq#$_Q#L%@j^&xW)?t+NOR6A8~>D?*=^F-)G6RE*h!7a&2GS3cIOO 
zsH>~*aDq-mZa}u$z;9^d1%!mek=ygCtGBe6##7;QtcWjER21*7jC5h$zenoetS!!z z>1sNQzA^gOP<*BKhJ;VXKxs9!WXz8li>KxWOqcq1bht?xpBgY+EWcl|R@xzB&OWqz zq>!y4{Sr%MK6t-3=o=JE_Cqd3O-0}LJ?f8OPA$Lka!W;f>%O|f2;myV5kP_BHPxH3 zl@*wi&F7GC)XIY>Tv>Wh5~~t!GV^i zmk*1xC?l#I^3EU<>Z#EOhogD>J`b7}tKUS$dl)_-5W24|T}g%f^!46UoyEPQDIFO@ z;iL_tODK%jHwd|Oa7O~CZWF;sjtaBoYRmq!5DqZJ z0SD;55UV~KVYxSW1tryJ`he};KmcGi3V`LhM@q#u`i|Ha&d!m-;e3&CnH{b@lo!h4 zag<{DMLedfO=PN)Ag#t8UdxGEP-dLt*`kd9>xb}3Z77nMT$J=UDif(?` zn%Kv$)-5bX_J=cl-SKkekgDT zZJBeMTLU9f)!BucU9~sx7#P8t)U5%*s8k8N;aHsbALEPB7;oM%7|&uRGh1}@-}Y&5 zJ=o6!VX)6~8DDt`+)zf7gIhgMY5S6xEVTzs8xCNb+xi~tP2rJ01^Tl&@(#_Ki+?gV zoEycua~PR3cujLXrMItX)$(|OS{pbA#=qE}oS4jN{;8XniE}@$*Swdw$oZi?ug3#^pb#P?WSK>go1y?%!bb(!whLnC6h-9no|%bv=ke%p zv|PF3!qpmj)hpW|B2#003sR&|PWA1QFwVUe{3qytflRhO!@SBc!hL`BDy^0fhQ)}0 z;<3ez=?^q2IG2pi6=z>w8Y^^{6zx0+X_GGpcZMoF*H*9mJo-`{jlzt?<&icwx4MH5 zhzIy33lG+=$8dRFtCv!Jcv1bebr83ca_Yt zL4=Da_kvV){ER^!7|Nj+W>m(0(zX6}EeYIi6l4!(IVl}WSmKWv-pUoOK((G;m6H~q zpPKT}Oird+usc`KM|JUa_i)B&t?8`?vUS01rf$AoW;2=cRl=yN{8YvGdAbzc?LfT1 z)fKY$Qog09t)#EMfRoQc8zv@2ASYtITCH)gadmsQhC)%o8yjLs-(d45_}vP<)mNr5ztvF<%mIrza|VwTg6h>Jn}bhu7`J$HnakktkR4O;1Xj zvwZ{6YFHPHO6@&HELEA-z8ZB*i3~6f74&^Lx_^H*N#5$C&yrMY6nHNdmnFVnv&+lZ zA4{IMQBgso4nJ0r>@;6XUdW?(A2D~jF6eLr({nL2f6 zjt6Oci0g_vqZM>upPT>5NNQ|4U$s$?tIbw^Plq{Va&6v}9NFtLqhYi{|ii%O?9bMhrR+0yH= z<5_BjG-z6qz8-~TK*3lv3H4&FnZe@4W3qdx7ORaIrAu`^F`|f;MwgOY>k-28oQV??k-G=}23?_{ zq43(y>S=KenX;8FMyidZi)(V&spzYriuPpLGp(`b0J+pNK-b9nAxb!@sA3M4^MH&V zz$K7o26kRyH!zLcQ5V`m4M)BfopG4jzC`b6Mu}=5ZRXn7LYX-8$L+FGpJJ@qO9EG| z@j}xvr6*Pk$Um#4Rf3nOQ=wz@C;ICq<0x@Ml^B7CLT6WJ_T?Nu+ajJ62K#r|o)@2i zNXUTCRo<-CJv6fZLvpynv~gqm?UO#jt$e)ecFxUn?KU|ZguLLkmgkcNXQ_5JSzc>P zOG%%s#Xg5JatXSFDHPP~3nLkY)G7yYb%{ZIsY-Jl)R4qNd!16NkVG~HgZP3DS&{4Y zzJ){afHMqiU4S&_-Eq=P7L2+?KuK;`>1{}eVBgi&iH0I?aoQL#jB+_ZTFpF7idrijpZ>^rBZdc^{#MS(%n)jiAU2tWn8OBNJ4vWkh0 zMRXdBOi*(M=xa-6t>fN8yxq7el&e}6-N8tbhX^L7WV=5VrH7@}2PFuK#+0|*4czJz 
zCp8(_A^#&$?X5*C*3$=v2K6X0%^9`Te6*TMF+@})Cy_0Q6U_f@Vf4_<{aUEgS;k(2CBR+k5e1H|`R5W##2K2psP41Pk^U(*-`)?I-bP z8n7zIf^<8BPZKovlOkqi@Y&}FPk5q-{8L1FiWC~(6cI=?>(p5X)E)Yf<^~q9*tmRL zE+BYyk7jx-q27;!0fRT90%u2P9cL-*;z`(gf{^>@r8rCgag2cLo(YGQYom z-SV@i6LrgHHN4ME9A+?Ro78aJpoS7uzCEtrEsWW5^Ssx!IZO8>Je(D@i?Isiqsx5U z!}08{p(Yj+_tZ&gkE2wnLP~RQOTPb}XKUr?`O0Y^nNDVFnoS{bp8A`u#RK`&q|b!y zZvU~VwI2HiYS`9xX8Kv*&ArVUlEIym%YEkVvsAW#FB>GOwCagQSB1k1Pkz3*l{$yVj}s zuJR2A4ID5q!~3o69`Vp(ktAyC+DbdKjPx3%t^5pPQC?PMij%_LXrlQ|>bdTI{{*RY z1zg(xMT7vqkXmxhRr$~~AeRD7ykL1XV?a$ zBrjaiZ3U6Eth+~NF$^k5*!RBq_3@%^+eC+)X*)jkZn?^g!Vkv_BivVha`0>C<|i%WlW!lLF%{8S&nx zOFV(q67nbX$a_hq(2aIZU#C!Kq zz&2?bC6e7@Y#483Bvo+Gdgs&~h09jBV1;e@@)M;%Zw@Ri2+bjDFAEE3{YIYEu3NTxL1xeByB@-*IVMole12RbUa)4QxNF@Y7@lTJrDyZ#B%Df%n>lK9@u(qGoTOP5 z3O!}E6?wfoW>%5hF&f;BIyTg!AQw0SJdJE*filP|@(0E$y*#}YS4(=U9KSH(T%=I6 zn(�-HD{RV9AX{CRhIg-(IpHz8+_DTSKd<91jv3|D|(BTlBnT)$07Jql_CCFFasa zw&xO$MU^A%TuBeRkQ|^y^}QK-yAP+zqG0ws!mLr|LW1YmD{9c;SAn%^hz zOz}PRN4gDEeu#F{#ha^(?ks0pTKhr0Vu-8Fp7$3FrPRx-)?%2=g#$PX5$;{?ec~bS5TYm#gK<`_D-1b!^_i(XQAz*X)tllAG303krdP7fWHpszj zKPkDqWge@1;8}G~7>BWQ_o>nL&g2~54S&c(+3aPRl~lQ*0zOO_7AMXY=j%5;U*uQ+ zN|`Qybm4*c_b8&>nei%`Ib+4;%B>N}f>-tttYPR?#aYs)Dr=G5yj|V8g&$E`m1GS4 zXDf$gYOYcjZY5zInG|q50ml17I&nUHZys-Z#^JLwB0ZB-Zq72l|BN;?70AkMX(Nwb z<#b(7z4Z$MNj3WR-E+BhDQ51H3|z~-5aJyyh6Q-zZq)rjAC9;U;WbFU;X$Zi^ataB zQ~EJZ>WRQqiG(v$M3S$i6o$YxOo+sj>qp6JQKP|0x#Jp)h5_R#23t!gaAF`+#oq$m z{LIsGrH(q_c1V!<1|f*tmR%v?c&QN?g79OT(^u@f#^#K#Jers+9_UiF#y)WHejd&? z*?Nhx2u!#eEeJt}CFTl6j*z9RDEODyeMWxNRJ(7fcV&zz16Y%hOQLc7qy$3s#BhVlzG%Qz| zsNrqZ`zGZHKa*T^NMtN^kWvlecA5)4A_`Hmp@)EUfKqHmX?q2)9I zsV=6{X2|J%v=}_omp0zsNEv#<_~KaZ(z8;w&F6tYIOdOt)hv z(4E*8|FH>IMPwD5TVG!fpC`H`(d1t3OoAmQIZq{91U;I@5wY)9&7o3BTQQj{HsDec zKm5s`BP$|8)n2P_Ac{P#?Bs%%7*`xzY2XPos-PbNv~3eUmYb>Ud?EswPex%^<;C#? 
ztV|u6{~&3{iijBcaZDrjJ{vnAHXkz+Pf5VXWpGGA!2CgqlVWc7aF5^{6)k^RSz*q} zqiDK&Q2WbM7t+MI?{XBIb4bIzd7=SmwX&X1NobMj1YzYG;fd#RLZnmMlGu1{;?3zw zunK+Nr^sTwme`Y`kZQwGLc~nfXYTVV08T&Sx5rT|E$J|;0G1Wbb)q(~3U%<5rI0qq4U~+T*b(Xza zxPE>vN$(`0k_>*wRcz7$r|6fd$GK!8K6{GuNl%x#RJ-9MEM`ivQ#Vz6dO8#sP|1;) zTo>V~s^k&&(#jRGJxqkG%1uz1*nBFkHs^J!vAUWc2v5Gk>HQC4AzeHfsz+R&WQ^fG};wM4rI$CGAs_%jRjCvQeS_meE5Kq)k{ z*-hSmb|Sc-L=~^` z{6T_`&>ji3RJvG0J9G2W+NZ}_ELX2tWst;Rju#lJW4+$-qSrUII1#vTI9J+Jggn$e zdQp3gpl8u(@op%Qj&Y zsCB-!ee2CGNRv*rItgH3McEuc3e~r{T`Is0J_VRg&s(E$xr9*AZW$H_T_?;Y zbGZBIEt`7*eFfRP*+O*GwL~v3}A7=N;hK#>ne zer(W12tleeYm&qg6ui-SV|d|xfBX_^W<36e2N9;Tn~sd^EUf*d{g0mYp}T`}Pw6~8 z-bD4|KzhR$`h&t}o1I8~*^A@|MZ>H~yNNEp4{^?EE9Y0QX^2u56dte7N-dugt>lPt zzAT*b4d|$qxxJ9Mx-2r8}@S zO$;#;e|Vyg`K*Lui$cNF!-#x0O-;O+`hy=WDH-02;?EGPzcGu5Ehv^4#@lm$)h}6i zs6Z52%z%2pEjLX@RC0SO8TCAhVSl>(P%y1G%c_l|K)wwBi^ya~$CF1@O)$kRemQC{ zWoN6sNk)R6-;5q-rV>?_)p=Ew)`!=lR}48r%0UD#GGcqyxalgp+}T3EUVthvP?gO% zt}17zHiYV~*p?luk|WAoBHowvdd(9lBg|{quPLcfazt^{s|1jyVp|=GYfe=o= zESODt&g~bKL`G(@zOh~)zF>yXFFvd8&u5EBSb03NE`}g`_>o#mRAd_q*HfK?2qqE& zU){}$?2fE6dlMezGnhgYvhhjzI4M*!%zLF)GHhALr>Z?dkB4Ji1oc+IkU`a~sBsa? zeT|xPimLSt+-AXokAhthQt525MUb^I*1&*Qc0le-gw}C%Ts74Gsi{NIc#6dL?B89! 
zxZYP#EJ#VNDhTk-K~~e_%QT`tFtOo-5&K}b#v=(f2Od$0z}(=J2Qelpp&Ag7LCeqJ z;0r6y&TWK4@&v;o82Q~b&esm!C$O60xe;bt^i<_rt#Qu46Jg8!4)*+s9d#TFci`c< z9BqG+|E$H=y=QGUUn4QQ1cjOj<>b;dG=ykK?Vsf`@DvabB&4ZQM8t#e+z<@Qq$seT z_&%P{ITG{MmVTpEQ*$*S=Gx`vh%==97RNsxg44TxtsJ8~;J3ZvAHfVD{yzKd`P7|j zV>sMTPsyUP+jw9Ml5^(p#)Wvfk+kOxNcf^bPj1%dvK0eh#h1#&Fz&!rk%?}`1= zz97RejIE>ndKbV@t+Ki}IG;ev zRygdnI&D~)=@8Q)i~kF~@r?dNG-igTc2t)I0{}yD8b}h2Cmn)3S_8vxAYZ5#;H`7` z=7@9uLPdXr?3UZWF$a@eA9Xfj|2I<)8TgKX68s^D-$5{0qIEK{$BCb)mpfR~w=V`1 zwzYIqi2rle7)Znmc=Ug#9Q2=Q_pf&I)B)cX@P8bNyfr%xP5oD=yx{tynQ>Twtyum6 zj2AgwhfRO7LLFw>K{pl5C54y zz1b6h=kXzmmY3jDo1}*6XjSvfSb=s36z~NVOrj9r?~*Q^-+-B=jU7Nd+wRb9-CkWW z@XwgSd>D?;?eaMBJ!vSmiKRIH9=fIuteiQX+m+&ragk9oDpMssW93Ky97n#U1C~zs&z;+!r?(!(GMP)Ew z?OfDcZE>D>4COL<{VIG&YiJ2P2>M>5+1}Bi+HCCOPtonrk@;;byw$=xGTWK-8qpNU z!a2^jVXUB0slGj)TH173WB)&FePvWtYrD200*Xk7fFLa`-67rG-QC^Yt@NTnIv3sD z-QC?G-QQ&Iecso7&NqgG!4GwH+PK5g{?Ot5G@ zk)P@GC$TU-eF_Gw&(+h$%jKvS41n#<&)=V@(DUL{yGX4W1ptAn<0R{(0X8sv25j{M z|AmrJYSiM)%*rb6I&vV!b|HwdIn|Fgy;cT1ob|hsQ^n-;dS_tj_pP9M%v3FXjv#oAL!%8-T5#Z*VLIwaBwC7TD*6p#;7>dVd zBw-B~4L-lZd?El&;ERLDrnKqA@$s>f(v>2Gy2;CHe2?1w!jScDL=rH6?S&aw$t?ym!TV`r?(d- zrc~;6bnXy$U_^?TysgK@JqC>d1%zHiQNVRXe07=|>)CfN7p@JRjj*o(-Drjyq0++B z6_OKv;bfeo^5lD#mT6Et|5c#~gA@DtQ~jk=-}b4zmY|`+m zx*)wcJNuuC%&6#hlV%U}59cWDnUZPUx6fB|*aZfIQ+oU!b+D@oERX3K^~2R)&$K+6 zY~HeR-C^WNruf{{GSef&8#lY%f)5M3oUU%si`1L13^Z#S>Nd^-+1C1F$wvvfL*q+3 zTfh6Bjl-&g4K7U(r8&j`WWw$I(gcM<4ra=9x)Y%E#IZ_S=HT)0GMu*ideR>O9`3`m zIKI_;ZX@GP-LZ*%FjfSVgTCcTMYvn3qgoXwW?`V9@sGv_~~ z9Y#UDS)YzuofQ~nkxHpvPrCd{=9u1M9Oa0WJ%|K%FYNLD3N+yx@Uf$CeM-H;1@$=I z)M%#p4HTT)z@e!(z{d(?5sQ~uJ*kOdJcCJX0WY%cXVQh4#|tOr{XE0-(BQPBC1F`Z zZvQQSLZi$rJ~^22Fyw0*MwWLl2~MURX`5$eq80PP8T*i>^Jh!xoh*=Uln7$>H1Vv* zmy1R<9?vX7$Uq||n;%S;LNqjq>`xwBTJemGCXocW4@3g73R?Ho#R7i&G;lwb$}B$9 zcTP@Cp#tXjHo!nHP*{B=7K=uyN+98PG;i5La}VSi@F^L6O+C6}*EqbTO+9(abP}M> zRJp|5b{oeSG5;6c&g4ft&}__Mm6me1p1CO1Z{a|{{y zz61vNrH?&cCxLgJ9^4*3KwmEIE9vU!lsRwMpOvncDn^Epycda#*AG)=Bnmj&f0E=n 
zW}MmHIx=TT*>1?f9yd!SF$d<^BrzHe;UBwpEht$USQU46hC))}YKe9r= zgR}m^dgw~B6PvD6aJJFXCvz6SFVE_c4SYF=y4yL=yz_he9r>ptZ z`_Bs*+P_S0e{UgMqlf>@7D;k{wC3JU3{Y#*S*kJH9j7rtr%JBD7P+!u!L_;UK2sVd zDvZ)0;f}MmMlCfR2X}@TJ-JMY*mBGVgcyk4n1<|snir3Z&qjoUt4?_D$bfzuk658* zYQUoR3qDR*0?>i6z#sovODzgON?_mM*e-h_DEi7sZf*wS_lab4JsZlnqP`iB( zxY`(+7b;Py?M}2wb=+t4xItaj1G402OcXww$63xtx4KZb|X>m?6Thc|N1zSJ4zAL6c z5j@<_5+)EDwEE8;83oUHb^6L{&`2fv zPVgva=bckQ7Y&n)Rp#%zp+|$z{Y%e)=uU( zoi0?08$6^lEC00GGS}*hE-&au=`V^p0!5deU0RRLN{!RwZ-~xUn8POLtEX@y|J?lk zZlM-gAuZ&*9KtxZl1HP0oZOljF>s5+`K)4cZwk*db;TV&cy_{0iPR>>!KZ|FjK*p! zm9e2w{Umd4t5*7O2!uTq|88G6cbVUgF<+pec+r~wuimA%4H5vC;||81(?uNwwP$~S z!)`{1R8qv1qu%pIguhSME$*Z$5g^ohsF!l9eInz@+)OC++uwdY4Arn6P9&XBeUBty zch$pN)kHb;I|b6>9RzEi`p`7Cuj)Dkv(UdjuGauU0x@$b$6u>$7^9Y?GrepRZ+~J8 zzDeb#b%0~6gGIA3ts|a)eYcA|$Uc<+66qM@vJCgQbT^n(wf*b~gz)gFNssgNK)Y;$ z=?@->>;D!I0dFtxS-GM&jxwGh&f|W|P&L;n3=*gW0R2)mc-@+tDVCn^Vs^DZ(!_9~@_XeEp#EDTLCMeU)tM{ni!t^GD}X5m?Vc zAxha+xMiFf9-HjJt|*KKC&fbP4vL5d=ZOHUeIlVY}N~<^tP0d7Y#8g{R9(4@JQ|8sF$;o1X9HM*u9~%D` z_VO}+dFACMACc775mA$7`x=)J6Tloijp-#u>RqxwQE#&(l;LfEuZUV>)RBFt*m5)T zp*X-Fobq zKXD>wJr~zgNFj}F7*8r+hUoU;<8ql(1r-I59Kio@F8O<8nx&~6^J~w($e_cE$v&YU))an za!2dYT<_*~zL{+rC9~gBPZwzfgYX$i#1h}*;%fQ@1YnL~;-E$73PymAYz;?L3Vtv3 zLEc@t!YBvc|3AymJ6W_<7)N>meRn7JD-xA|ch(Yg{Mr3k=WQD5 z`v&biyyMHQOB>Cf#04HIUO8Wug&3;N!_tLrPp`>qCqpq|qQ=e(T;MuRKGND^= zFx-7Vvsd@mLx8GP^`q{Hre~&bg@Mt05liz5_%jU+O#+)?+$B3xd1R|6i6mxQzRo27 zQ~neHWMKdQM(T^Gf8JI(gUh(MN3F0WbzeCZ8PBy)&Qcp}7WnMis8gt%06mcc^90Fo z8>F(YfAQ!yI9BPzC_7zOXoGrlcc*h=!c+Ws!AQuD${T2@& zpNGfJxA-#0g9h>V80SXo0TL((weF{jU)uSXhzrV&@IwME#Hcy_H)>Y>q^Mn{ zF3KD9K;0uvoAx1j+O6$?KtuNK(nvm^gDQv!HTj?QJ0oela(aZUj%Ja)y+MSANKDkw znvCZka))k_02_O^s%5kH=DvL6?ewcfSIf_)DWd4q+MT0q38#BKdF~@kyAQot zHzr#%PI(<~n5_@p?~*8Bk>XdHyX* zLnagO5wVqo28l_ssf5N^&8g@wosP*a8`;FO<(Pd$uPQk3V0AniX?ebb)mUBzh$^&Z z_4BIB4odkPiba0N!deQoUtiy&l7+RsfhHWVQ3dM|nL0BuF(Ia1uibzN3^;eU!?DuW znp7PhgW`%+w7VxsgZ25%O>3zs?_s383~)`a>`W2Asy1U|GFh0|j;^g~Cl6Ree}MlF zUt#;MuTp7%K9XdXEzq1GG1TBW}tDR$@NN! 
z3Kg12?D}bY2&{Xr08qOact6GC0SXcTH+{Kd$sDsJJr|UJQL8aa;EFt21ks z@kIgyJV>v;Gx+h|kIfKrP+8<02l&f{qfIt-h2(k{!VK5dY?NV-BCXx56Ed#558P*NI!sGqA{68cqns?uUgz3;X)lLW80=s3ac*P z{`xK67!UyytO9GJeNyjN>6D1pG8B;>}k-P?qJAN6Ql?x^uFW~C9L4UlCEu&9&z%9^$n8W=>~~d{Dsxx z0J7YB3pqTX*1;S2ar?pB#_!LEU%Xpx~A8q8Cu_cg;~@(a9LgNtp%~=_h+*%lWD+W|I z&^|0o=WAM*w`;VE0{KUcq{jf1`@0WsLti?J$s*;A0PFgJi&2%Whaqdt5lG0JI?#u@ zx<;f>1=2(pnM|&y8|3qC>c$Ll9N;9YJ%@QoGXwzd4?qOuhqv!h#8ABqydMoN$%<6! zAbKLOg{#z5NPsj)efL$t(?MHxelIl9!aPK?g&Om&csj!bqLgup>ZaI7rTn&aA1 zi6%J_HLiz)5iV_@c?H}cR|nAMX2zNHp3j{vO^gz%@NXfZbo%UEYr0}j`2JFy2dc6w=L0kLNZ z`-3rLK!b||GNK?*D6sqVAgK|_@kIC{s~_PSa`L=pG7nK)aXN>J)Khs}CKwRjJ6LV= zJY7o|>zihmD8w20?=H_RLHI%`!pgJ>jU^y+sYC=rTBt*iLG`$^;{YP7vU_|vInI?w zenj)@j1{cX+$1G2HYe9p1oo@WXou@;)W*i9@K;o_)$DgIx2&02NHcsA#o*@(N1Rh> zc~x}Hz0X44QqiN|U8wOhzo2oym45At&KI&{XxT6gmZ}k-v_0SLL7HhMS@N+MzCLRcPJfTcY>cPk zpXx6PU_i{UFuNXolAO;-Ku$4dey;IC35SN~x%r}7$2UDg{W|P}wH_Sm8W?-*i}p~e z_K9Z~oJdakHt;N|r^dmwfL4R z(qic0bt&5H=rdZdfME35KMxsgIf^?Q$?B;-PbR*-?}Z)Yj<9jLL9F4+gE}E~!?Du# zAbaSRyb)Z}xo=iHfu(uqPCC3_r!_Xtp9PyI%^|D&OrL6wSEqSJsZvd0fgyXwauCoDn+BKERoqj-4ggDMsg7_#r5Bq|t4wU~2c-_H#{oJ~rw^B*YNb!gmd{cw^{pXYG-sjL*4Feju5;k9zL{=+$o1F(a%)XX zaku5RoouCsvE6})F0hK4%;juCdb$=sIDG&N&fjZ}Ik1!FZ?P0L0bbC&Fec_dAwm~2l{P8+jQp=Y!D%}LBR&SP00=5uw?3MB!>8?wky2RFW_%vMo zqdj)$k|}E9!E+ABo}FfDQb0InM-R!$*f&}HsFS&-g{PV>!<-^*+k z9UNK5$lgw#;Kx8USKzN{Ci)3>1jbDAiUAKwB@1Lu%_CAu+aNr0;+<(9Q;+&b&OBp5MyWZ+<$FB%WQ4xn8C$&d;};$#}zRcc|xc zQ8(osE(ENrQ${Fh^{BuQY4OU4ZQE5z0GF8%=FS?eHkrtoC5^;k2_eC8#fZS41ZULm zN_bp+mWhu) zxt}|e_-J{}q*)pN`FxJh6eP~cG}EFu!4_&eAX$-!)8 zdp5@TrE_IAgrP`$N_C#j+vbQOEUA4b=-6<1n8A>Rh@^n-%BXmlcnM&OZRFHXX-Wd_ zqh-cXNT`<(aS+Un-3X*Pb)TPUct%x3qq1!}GZ`~5q5UPygs8H0Cik&_JM zV#B-aE*E(dqW}>oaUH=#sal1E=+22o`E{8mf21Tc5m=Wm^Y*LvG*Zy+>GGu#=Yg*5 z-r8F5>V8!xtxNA>;ACF-=7!)VY>S%hO5P=ZklAQKyT6f*4Xr~+U}z3y5>uk5Zu5_G zP@1y^l7O0HIOz=TXEu9*w9xxZ1wBF_NH4&$<>|eDOJgNwVcyiunMy(QH* zl?D?DTTMoG`tMVT89_)qgjY-Gdy!8)+`SqBN8 zFxmmhF-tsd($NQn@Wi?0-?s9?Z#-)_!=-j5#}`{er#c_MWxwgw9WBNa2QhXv=xBd- 
z+LAXYnw*sTm7SCL0QQ4CyXzZ?*Z(+?$R(8i6UGS|N}k-_WI?&cC}*@V2p7$K-g zc0r#m{8v(R^s{*NfPhTKvr&Tx1A%E>0hvsRD|Hr-L+@OylI8ITReqAo2w0W942D$F zUBWT>=Nn`JLP?;rrweb8UyH=j((=~9HP1P}@GNzyp-lVH1>mti9?X<>sLYwh5aF66 z{9R8czxzeljo;|d79W16pB^aaG0JG3@7zNT5u?2=DC8 zbg_DybD48| z7A&b->GtVvK$2^Hgv&!P>_IjAo=@|s1zoHu9%jCLaE%_%QfQSPnHjMtZ&NwcV5 zO-~gs3w)*=1H{A@)?8`J!-y~?xf)+y$e3&*{wD6xx(uKPb8hFt;m5i~uqv2%&Uw~e zth^9PEYDe8MIaE7PdHv|{w&FLD_NwX{AGK96=&fkC8TwZhnq6fWXq1}W!kd-tluh7 zJ*qZWJp!Dp1g0G$)GP9eGwElr>s?jVR^!|*`{h`upLG@WLyaDli}GH-e(jK`+x@%V z+VNvg^I&xA9`R~5!jwPiRJudoo(9tn&?+z|!Sph<=d#`kw+6NgUyn7Ot`D4^T~eya zue(uzL?v~(6CrJT_P=MC_^v`Ra) z-YDQ8sNmR%rC;V%Bl!}DHpA!PHH#top3mWGh*4PjN&Y$dqVem|k(_0@rFe)7_K}8b zzmA|a=}=CZFQGmju2>dKWr7IWZfVCSQb2|J^1fHG_8)V1r$?4qY(vx*d5*nU0jVbp#u7=yCeJyChn%F*csm>jm z(R@CLp-@5lXnyKuEq>*5QVeN})svf={jEOpv(Iv^Qs%A3=nlSIp~9Vqdynv>yW^oB zG+3vLwkY0RQ0%Y5umnkJMvdzk!O8g^kAy9Y@58*nf=OD4g#9M=m?(8iJ0$V=D`($8 zjnLUF!PA=#_{97OVaN+*x~F!Svk(@KhXfX4v)-y1?apwf0nX7j{_48j+qrANKp!p1 z6$+oz7>N=Ly6@#m^NOs2x4>{HMy!E`-+W$Rdptqc={ zK-rL4>6#Gct{CS$_m@=03<6X68y@HDHN>tHWcQ1VcULuS4!O~Ti%bS;!T5F~sY?k@ z3#B)Jm!?cN$yO>34Fwv`Cz{vNGcy zKwp4jQR`tOJftM}GSY?&-jkb7wrI9eLKsNMALDL{<^|rg8w0NblU_*k?)lo@bb39> z__?i;nltdd6gda}bE-H>WI`Yzl0_7s&`PyEY^w9`Gkkox1zb)ynCm5Xbb9Wwds!}@ z`mGW*Ft#M;eGp>Cyg?4kf_EqysJA{Glc}u&IgL+wdz2H)OZ@T#-c~ODBwK&?1_EMBZOnrz(8YNi_U2(CQhnN0?$kp8bsh7w zqGqe2fyNToWSuGc2P|s>dYjC`U-yx2j}K9a(&ebF@}6;jt-wTyFOq696Ea8$O2s*d znSs~ltv;HXHMpYZDu8jqNcBke$6k3edL2V@{p5apVf}T5icnCr(A4Ysw3$WYWN|E} z)$W5*ekrLa#7uv68`+=H)+3*562Ax~lK9NVG6qgSHsX1U5U#F5=jxPW=O-`q(;zpz z)XiOxaa78izxROZXtTV89rbd5#&%8|9*__&XU3R z+*4%pwZhRMpty|}&Z+g2Ft@sSu@}Gcr~KJ+lln(&;QSMx-YMp+C8EcJ0Gu!9T9qgB3hxc9qA{Lld zTwKBMb`!-lYd~Q~Nz`wD*8X->SCW1z&Yitv zyd;H9U4CwBss6g`{8$g)L_q8C4SYY-^!II5a}G>_mZgvH-vzWNaYDn_s|{n=9}!<| zT(7Lq$*Ex{^oRS!Xl65_6peBC4*Y{yRT5f8Xy~_%OM^`QJ-ydFR5|pGs@*e(ck2GZU z_H9ti+CjZeQZybZce-QCX3*vUzLxQqRdZ7zwyX-)r5No`TStm_DD{G3>Z9*-fz4!K zb6%>J>jZeFf{)(txnkWF(NCqbZ|#rg;Z~k6u$J8(<$>F6KHOg&dbhN2&D$RaNu<=T 
zVg|GClGTnZH@TJ>efM6SuhV5K?+j5=b46|6z`cX!OnBf#io~s%$&@x>2y0eh3_UPp zq9D#v=;88shO1e05TvPU4QE<0$^5dPEEusFDwt9#9G$ak?LiIx9YJ25hE}3pfw|wU zsy1J3E^6K7Fo$&_5mzA;FHE>P1p^D;v6JQ&cC=7EcRBdid`#rg>PtJ})qL4Y4L`ol zIyAdAGVH&4m7~gXU(YsSYT1Z4?U!kI+;r7XjudMKYUqe5Ou;F%+FETJ{WkBari9ba z@Qn@F_n8rIZgX)*?=wTK`m*C$VpW-3rst87x;13uAoWMJ;595DKaS`5M^NP{mq!}x z3?CUy53+pqV~Bh&fEoz+Jz{$UO{H=z)agx^A%JC71B$u5TNvTOn$bl~=*m72T=X(6 zX~4O^KH*g}D?-eeDo~^@$(Nl|Ek%div?_?gvcC?G3`PE->_)GHI;mdoz(jpis9^kl ztR2PqKRI?*WH7UBA##EsIeOhGxjXx|#@kv2$c&AHYfTT9t%Gs+mMnot1MVnU2@rDB ztJ6VQ$(VX{IhNa-yLuuX&zmRJhn)(^(DcWBt~7K~TM8-fg=%%zu-ZKXWbGMgzvGn} zcoTg}(Rm8PsOHmBLy^Nw4csvx!1Aj__i@l@n&-o7pnp?aPw;$HMkveb43L?t7hf0$ zmHm;_N9M;Zt*Q8Vy4qqsQHJDTX@>_AI&if{Qyv5DvHLm1`%Ok*`$!5$?j%@^K}b~8 z-{TbpRen|F3Q>ITC%59xaWHbr!$q`P0*4V3Tx1aU^8ghuaLrLvTwH)Ae`|?S^x=@2 z$yYbkV{&mB|4j2bv|HS1l#J|JKFxf6A=JWH`1!f_uT}VRnsY9s6D`B%Twly9Zlam@@!$2upY0 zNO43!K+xjRK7iI0&qrciupUw3FG|q`mIVq?`aR)E;})^o*&YcohU6F;QG6#(cr108 z1}zU|jbFujYRU}?+F*+6Z>^72!otBAOfDz@ak(X>Qhf-RHrt{jIYO2i9Ak?&f~W7y zMna};@;y1X(Xco)FEXrk!Ime>GCQFbvoLrzt*prZ<Yu zq0b|5*rx4n?mC;rQ8QGordyuj%x!7&ZR4q&41epn1|=t(vFKXHSSN|cy>UOOKVp10 z|HGR}!xfI}>5jbQ6Dx{k?-MBp@!E?-?fK~exd#b}a5Mh0}A+V zaY7QybRRv6fS_eTOe1^f15TUs!RSD=XaPY5*Lnx&U$rpqspL`sHQsYzF{4kDE zcuGH9hZNI+l%b$*{kTXeDk9RpE&g_gckg=>NF|s}na7PiV8tnmBSuAF#B^zX(Ns^} zeo7t>Ya$OXQx87w{m3cuYxX!wbra6Yr!+`T(9``10KkEbE#bRAyi4AItar$3Z1N?G z$Z5R(nA7}Bm6@3-ORWwwL|o;5;L1wQZ^DJ29| z8d=wc3)DKPboM>7O|lk#54{+m>ajaNJ@{-nkgO0ahdtd~HO{D6kDy2_Grkw->$sXV!2{X}E?eBS+Gz6YZzxPQ!L zxL8$`bGd83gZ#g%L5|)Vzoc#&vKopK1O}nFPHmG3Fns#~Main{xTr%yDpzZ~0fqc( zb1oS92()i==uO1O0P{lyx1{dQ1Wfx2#5nU2%b@1+;)1U_XmX|tzaZZi^2+J{@p`P( z)AQl<@!Eln+PLSBt>^Czh|tOg5<4bVq&Gygt#>ICmPye0+^Jv!hr1j=|4XFevAA3JV` zx#!$h>SJQT=ZV{IxkSuVf5c+?ZQOu$yPhJ&-~8A`q`*|3ksm)7nDS9se8#x+a^-qQ zmf0Q?3`rL4m6q1xwkA_HQb#+}UiXhjrqvwcx3@f3<#fpkcHor-+0}X6f-V>j$P0}9 zqV<%6RoR;1o^<}4BYNXQN=%Fi2H$Gr-j>u!XbP|_p(In9pf7x4!fh-%TEd>@r{hS? 
zW=6~(nvzlZ2{XlrnK3T!X+M$;BQNq-Px#M%-~n-$*xy5){_v)Z`n1?kR$(#2*lvEv*GL-%0@ES^6O4g{AUy&H#u#bS7jt zd)QGw#pmdsM*;vm;7==lRN&+$7|I{>it7pc?4Z?`wnH#bmU2o zLQWzTcsATI6?qQ4&0n=_OfR}gc5Gg)hB5Uyu2w4Mx`HmfE&OqI_>o|#!~dwgyl+y8GHD} zyf_)Jfc~$rMj>Fz+~P`YA?F5q7{z9(+1UQDV@F&5y*53C zzze)|SfOl&K~oOGXsdp;7VHq-NHSTsY$wius(j=toN#u8;YQ?wWu9}4boGNVvi!nHPRo+Yn-Ie`9P2(P5K55IF(i}kPv?4yNo`(P6`bf zL57e!z8tCkz>3o4xg-Z#rzqAy@Ba5@;t!)_;d<*b*(-sVHXSH>E0v_F?Xoq4hUr!F z*S^Tzd^1T(WVo^kD@DK_&Rs>msF4(nFmsc$`Z`JdO1kk~es3H8m6m($Jo|1AIj-iwt2LRqzc8Y+B8;6DPeKlazWJIMa8 ze8vYwcb24zLjU{U$|rFHG2v4l7$p(^$6J=ZkKBu%S)DO4JWEckqN7Oj?{)k4^Xn6@ zdQ7Ux|GIL{)KBfvhn8<|M!KHS%p|@Qp0!Bf$?gA8Av+R%?)fgsML#SdsAs`w!|!I$C#r88 zCnNy}f0z|GDKuw@H$_A6rL~fVpBAdi$axw2>QDLmU;k-8o>jk}kv9}&FRDjp>!R9X zj2reEOWn9?Qm; zfsO2U{lDM0Kr9`bXR@k{;=q@&w@)AjMvNMsy3^T3yEvE2!TEkoAdm*1u8Am3+h&&Tnz+4d_csPh?k z9o~lhzHV#j?4FyZgYRi-Jj>9ue?U`uqX`w*#`XoFZaw5)K72k1wjjWp-^U3^3t13VzUsgy}s!Y7;w6&7^U zpPBu5^FPg!H1JI{o<~ujNy~+{rcfrWmQU&`H5!cx54jew-QV7>P`;%dER8B zC+q4l3vmkkc^$rIJty_Eb^alGvHJx~JLaU|(LqVqk9qsieV^2um5V)QVt4kUM+AnT<)XavW5l*{VtiB^On5PVlB3AgT`;a@R&MXC+mm8vxsl+~6CCcp8f3IL{qGjcZ% zN~y`@Ehdw(kD|sclaDX2n0tl6&^NPLtBZzO-031^|CmiZS1N&|tTs6c05>cAF z=_FNypTp#tcBYalMvN$h(FjC9IQs;hv` z*}A_D5r@HkpgI?}=}d7q+exGOaqctAXX>U4GN=z2UZu?-gMRkSx>Tp%Y1_4!vaPuP zd+}PKAz4YbDZ^{D708E|v#kbmpS>GKK}3Ig^h8{L#&U5UXrt6A7e`^5Q$kE5#s47h zL@Zjlh!R|x6qo4FtUTa=7^n(C^s^+%xF-T)WJ{#6AnLlrLI$s(D_WJA!`?V5y1E@rN#mUf5KjB1wE2!U0+-98 z#(FT1%TCEC&w8V(AHFwrvLo=La*Yjbxa&a$+qeI74jF#&7D3Gf>%dKoa;NFLwlh(~ zA8hW%CH~(3ieK^01_58JsY{#{z z!otD^sjuHau@2>y)Yo&#X=3*70__IxO7kPoEv>Lrl7Gvv03^O9{*SA^`3_CTk@DU{ z|1Noexv+!eEdr+yL&jQct%0VpZ4k=v4Lz{ytz!T-V!b+~3HJgcO#Kt9lcocy8+F46 zM8}ojUICiZUZEcZ(!aab@?Ekb`kEzBgQca%4Bm(Px%&Gez`g&_8AFPj4qSno1Elyt znw*J=4n)3+jRS=MzUxYhB`8i#&YYnlWxBC@AK0-bS9qh&rTHIK0Pu5?VK{OwIi+i* z_P&GQhy570*JqWmdUG3hoku}BQ(m?b!e|r)HT(*Ew(ZvjijPg4E1$> z!c@%LoJ5CYp@h-{n*iHYgU8z3+bX4V)W5KUH4U7-^iwzF|L=bsv1`v#)N}~ z245A 
zwLH?7UM>RUDZb>lhK6g8Gk}wQqpy~M1#Je0Ei7fd`o5)eV;0Z+ymBJgj;tyC@@4G93C{9s`JSHGu77$(dURx)|$I>$MJic z5^+zlo^r~srtD`UMZ5O?BwMdnl$krNNrQs_b%lCYgn4E0SIS%m6?n<18$)L6t@B$A z$IjrBo?i9wN@-BpbdfU2$Cv~tiGBWTL&`H!z9Pc;?@<4!RuY~KBvdMDn z*j#+hXbxwbuNniJ4|{kTrwDcdm(^VO9ehnThNAQ& zPIOIT2(C|-EobZiy14NkbILg(C`bjv{+XbAe_v^$cFs9ZmAW6CC-h`@{N7_ffti<6q?ne`6aZL9#3ZjHsZJ%bVzN`SFD%nH4bkt?CWLthUS%8EZ#<7 zb2|0sT>Px~L0~%L8?QL`JmXpKoJa*e59ui zhsD6Zex_S~C9YI)XL(Rla_UYE5<2nA{ikp54JCJ#7klC)_;nY zRu&*)%fQvFu%LzO z8bv6`O-};V$G4Cd2aW~aF*c-!u%jk@n05S;@ z3Xd>0`F%&gH*LT0S$DL^nY@CDhSx8-q_DK8O=LRxP3bsGl5Xgs_QmC8M^%Rf?>Sm= zv4Mo5eGg@0K<%s>&CW0|P6k6G9rgsr-wd^jm1Q56$Fl@KVuACbEmx|?1`|mH^_25H zD&mB3DP(9(QN!pi83dRr9|^;_O_vP2n4DZ0Pc@QZnE_b*r8*jF?`My4mkE z+3zI6MylR$GBbBReB5&_bDmJqGcW|Mfu#3Y0Uycp3aqX-`&!+sdT3Xm0v5U+4g%bm zjSe@d@sog!lf;OfVNk3lvbA@!$Ia){7;3X>%N4)8gzj2#kD15EhtKg{?Ef6g$;gmK zh#Vx#2AqWN01J6t-@3BWmU%t-l+}yRSe9PJCdX5Ii_4ZKeE^P3q zesWn!Kmh-0?6a#{QNbr7&g2%)FLvUr)j3NIHM*w^Ou^sODKihgJ)s>KU!POAKe;!E zFHdRtmf%FhOrp4*J{9)%4WU~lb;`JI3|)t;szq|>JXZZI@tjAO9V+I$egn?|2lpVS zhLGL)YlCWL^*GsE=f}fdPia_f<+Rj|3pZqgN{Q=r9m>IU(YN*maJY(rrppt}g3D)g zN0dkECJ~ASylr;}v%=fzqk5q)x!E`8*SKP_Dn$+UEeIYS9#kvHj?1c!X?3SA*eB2$ z3?G2N)pqo>7U~w>+mqPeiU-%_MCaj777NXxVEThpwSmtnlruF;3{c`*qEX>k)3t7z z*#D2Qw*ZP`ZN7&S0)!CUov;K5?iMs?a0u?fEx0chNC*xAg1ZEFcZbE@-QC^!hAZ#A zH~IamzN#&1i-Osi*=L^a)93W*jp4dlGo?t6+jGM40xgSglXWIM`!yF&Pksit%eDNS zgl?ccRTCQc=@>U44uvVnuSs# zmJCFggArf}1h#9>%|6xzA76_ar2GM!)#t${&SJ@4a86)wjGNu_^F2n=@oahI?UKnF zDF}#}fV*noN4EWToC$GqlFGD5kh6kNk98@r&C#u@2=rD%-4j&A$nvsIek2&&W$7L1 z>3SE@P(y$R82VZD{)+(2c#Uqmmm0^GO8L&HBb9XIhK;0Z$;M!7?x%&D2!sB-UYeQX z4Gv4yV?Rj3MBaXPtb#sP)Ozprv&6*tDvQemCP%gkUwg~*yoH3X33_}x&f;Lpe71cX z2ZDv}T&wYgl$41%Q%p|H)>gCFo458mT?bQ!nKj3=KMeY=$%0mjMXzD}?ulF?j~vsu zcl3$>+FY|t0jIFsX|9$tKz<{T?A$Xh>!@abHTomk4xuY9>~O>*fpph2VXZsI8S@*l zq&p(24YX*QNZ*j)tnOL-u;W7fR{+kh1gqC75mG2C?9~D;Ls@PeDUc z(N1a#;^Q@5bfaseC)B7Dy}_`qe4l26q0kX;pqf!*V0Jo*`*4FhgovLOzDr4H|B&W% za<&oJW4ly5>ugEA93!u?bR 
z%Ab5$STo@De1#PFRs371v79l6r{CD}w7prZUyh}_(BMm0Mbr4X=Yrb7-d$4>i69l$ z?4ROa?u`8-7C9xb2O_d6wBKD&eDfY-3AIJ;IU5N-3Wat^4FRvZ(^!B-w|JKn`iknW17IPU^Stz2S3 zHcl|8OCgk{^bZ@8?|B>f{IgFHVL7PG^Z-2SgLnP??^e z3kw@}tIbTc@@E{F*!hVYsTZ?TrHn z0vsHZoFJ=dz8ae1XNQBS=TwHlggwYsEGHg~(Z-cL`o!5Hs$-q0f@?TfJp1kN3i>kJ zA{o*Nu$xrx?Cf}hQgcOXHjijrF0gz>G2i2jRlHLO?19T*HoBz>)vrxaK5*|KQ@tS3 z;<|M}$Jo%nFscCu`X@3=2P?^?jAu(Ak7ma;Di4p%l11c7Yzg1NemoqElPy&;&w9nD zol3lejTo8hV0}0D$Eq;ua`}0qOkb<(I~9<36IzH1E``pn8QR4kD>wNFD)%>A%z?Wq z5o(0PePy(=eUm%>fm(5F0nyB@?ynxorWo%8dhKr><$~sus}@zNaZEeBEkj zI%vG=t+G_p>$}}iMhItJa2|lR-nz}1+^nuCAZ~bo!am)nA79Mv)#5|^eA0T6#I<)* z+L7mLCOnYcY~vomEPWP@g}D))=mKqm_M$3JY@ssgYkP98kIw3*Pah-i+u)KJt(FD} z$3K=8x2<>=2ibu-{!~I_OuSnTT3WYo9QZfQkz+u11{nl&a)=+vy;IFrdKqy+3#t^vP>UgYb4#4!YyT-jgoSqzYAy-YRbz#?Ci?%|5W{dP$9Z-3OOi! zexJPB-XXt}#`THkNNVI}_cl({MhUwP@g69^Je;aXj|Mt+9@T@A{Iohr?xz(!wzLBV zY_1OfK;W!TP!`uMz~C7}(Y8*LEq8F|#|9k5cT{wGSM+`A^pwoz1k+pfKI|pil?6rZ zTMgMFL3k-9;fCZ^!eYw~oi@il85TIFSUM5=&0`a+sfC74Rg5%t`+Jgr46YPEn9}zgcGm8o<$-^COeIU-~2gXJdxlW1> z&O;M)-R*Gz4wnPVe$9ykXKo=xLsr9#hDl;K0z|jIXqptG`5AcFx?EmkGpEtzWAX=SO2rl1?V$sr>&e{ zAMGAkAlB;$6wL?5)-}F+R`$u6i1%g}eW;wNAl+o7fyI8K+;nY}o|+3EneX(K`}LLR z>O08_r0fN89Lm@DW1*4&p0OfDYN7e4!Q(0V@xlUjD-{{CS6(G@qEY+B(*XPJ z<>jDP^_R;H4kKC{7cJ4nEBbY^G}`Btlm|m;6p5}jpfEJ_Vy7cr(kx=5q*?%`QJPL#0=$sZN}e^>dLp{eXcvQR@oLLmmLiz=o_xB9F^%Ne=XEc~x+ ze(YlNM1}^m`q?ODVHl=wd{opRk5#-7DKcvCwHNj6Xm8ie_s!F)r)!#6a5b3M=o?62 zhU|Kbe&mJ(WBSVO%uSS(KySZ5Mh@pm>B-+;&oLr1>`A%MSy}?wp~r8-F9?ZmM|;dL*ZlOSi$J;LjHy=7Y>}_HS#Kh%{H(kW9tj=>Mj#U5+vO1%zHus4|EWW| z4*T(uo|l{;(3})!HRs{!_r822F-;pDG8B1fhhX1N$gY=55Tx=ex~7rL2c2%TWpn^= z!BD-mM+7v$5MV}zWE>{7Qf%(o%3(_Ddnu_euwpsXbX9*)j|zpTrZ43nAI0O?EclI8 zvQRs$brCJb^EeIr5={_|+O|ZArOFRjOI_QU)ZupSI)d=YWaXLR<-D8!hKoS??C~XQ z2&E|KYnX`QLh&^VM>sSxHi*YhbyE$39C}aW%R`POKP53u((?Q!tRyr(9!B3^T=Ctu zh+3UK<349Gq{JHNfvmRmAw*@Tqmyn^RSA`AN&j?L&}+sWs8IHE9kj%~;_`s1?N(Xup1 za(Ju2A^~b_iZcEvXWyCJ*8EWJ2DbGzMw^I{FY><0D3L9Nkv}!260LTvX2TT?4yZZ~ 
zTQh3{v4`znX_Oh003|E@pg0yrz~!_ue;G=>)`76pXLR}@djkx#?jXo#N>;>1famI* zN$F~`=ZH^cPviwu|m*FN4>v%OZ-rnQv8J4)b!ckdVky{nW9EC z2|C;G#D;xwa$fT@+b7ONJDP6M+f-aYUI2(d^%>}uQpK)I*Dl6V?NNsb`;!-jeX)p$ z_ai0361_=$1N)dRr*9&C+fRA@xN}8|jGqt(3Dl~dYv!yejEpHGiX6#-?^XpXaZtfq_eE}}Ysp<~C{cy>x_ z&^}jCu!Zrp@#{r=%etb)EY8~I^e1oHo!qbVxBsmdVJ6?a{Vsi=|&&_dvcFI9@ zDG>HKcstue_xADyNTg6yMC7eI7V7oQK_jMx+U_G)xm%8adB}iPTw^6)ts58f83!NTUIYFqe6j0 z%q4Si2~qI=X@&Uj2kP}AmS=RdF()sx$Bp(+HKBTRGPF-6M2x8|!yjU=5h|v~;+DhN zy2O2Cb~BcDdQ@X$C=I7{Kd1_2IZWuOcgMj8EDm?bEjcO72FS2QIMnra1rOZ+=L3M} zHW`{q(HBg%_pbcxfRqf(V8e4qFdsVXoh7P;<#m3Kba07?bQk@*(_Gf}<3DZ_4ScR| zYT=0@b5E=GyVb?yvXXsvk8Og=b^lhL_>jd!RrETx$2)z|?=#pTON* zj3?zM0r%Ihxqkcyc>H+^S&#nT5j{zzzuuiDPSMW-r|~X5S53d30w8{>qjm&0wYIj> zG}nNg!7UR5$nN*B7lhif zd|~2oj&dV!&Bdml)<#mub(oB_^gAUbHq)`-rt>|nBCBR78nr5voAW(pD5dtS^KzkV zmylkiU(bpkDnjg$r{t|dP0@wf8?WcGsNZ8_v6zKsB?Ub;n0TLHF9AV%b1=IeZUh#f zk)9RcFg%f`kZEr5eu=>DKiGYHwlw?eVF4LpM2+ZZiEo9;1d@!DD;^=216rkqw!Wc3 z^UTEaYUgTSf0HNnEq&en-$H(rn~Y!$NX34g$JR1V9LG!B4bdv|$%I?}3$z`cA5%>? 
zr3lS*2ti)GdNr8B!iP^l;I)~x30iB%1kf9zx%SwHFIjJ_L(U6LdzQh!E=(w>2r8in z?S!}XhnMIwued=3fF$nl?2LI_b`BkUduffDrpp@fpb&KJbn`y$6ErOJVt?)ayA!Ow zAmMV*ypL6J%l*xQ<#JL8kDt%|5pvzlzW!*ga^Yl(V!Bl9DM-t0Z^3wMn>&*YTWj+9 zLER0Jmg^Q~rj0WDMF^qKiOad2qNd5(z}=2B3+fKbZ{#nahzE2YfE!oUQb$}Mj+C@C zn*fVUUNSzLMO$D@>37-|77BLJf+?gz0)2hMWQT<5s>@|}kcJ!e*RHDKm16rHEYzyX zrTyB2wp;I$qYxh=N+t)ucj)l;B%Hxxo;y7~{q5L-#0k&>^zRJ= zc%S^4|K~pH*k>UgG6uPds=1swW@@gNL{mgrnzd|(x2!!XT%@KV(bu0|sI5ldlI2)j z5b+%LDUSrgj1?DHHQ4V|Y>rl=^Ilz&){4(bA5^=%&XBrk5!y1JI_9ps-j;A!Oap*d zo53A@E6{we(|Ud8z-N!@ewp&zdU5e7LjC^RS`B6ZQ+E8IJV-MLh_rDGlGpTON3+V- zZoJE;KUSU(y*s+Aa>y>|Q0>IBV*pD>;woj8NBx3@B zWGu@0NmIIa7%9q9-KnW~AIG{Q&{)}fe&o&++LPT`;+(b9cs`QSS>$5 zjyG*y>#*fNZmvek#`X{X;xL>rO3%~_P|e?)0+X$PW0eX)}j_S6N979180S4y=F@|pe7m%XJ~TKR9;Bp!MU z5CI@vzQfAwdWqNqjI}Ht@?8ui73CWHeGzp7ng`lc!18Uf)C}3-U@|DN-le6L*@HeS zQ)8yw>`Qw9Ru3xOa*2z8lN$1y8R2646Qvi&=wI|rZa-3G$UL-; z(%-pIiXad_<>PEsyx1@uz8Y=GH33>coUXTw76D?ifq=pL_77j4e{XneuVk|YSVU*} z>)q;>uopuJrttmaZz&EtPxzyKN1Fu;uy%}D^$bQU9Kzs4szK6<$?GO8r0m1Hs?3b~ zowB4tU%b595i3Y}?@skb;5oH z23+LnW~joXJx009%%C?;M!auX5lFN35lz_(clW3jY2_8m$hdZAtLrXX-`qzF zJ$I1I&ZdNWLCG;J98zp83AXEPMdtI*kc!h8%Tb*4_(-he8?9))7d@IkB_64zT=7U- z%!`S2DcKh*L+&#?>#*^fqJK+^q`&e1gdYML_&{3cJmV-5nu(~KS5y$RR5G6EOt>7L zWef1-;8IxZlzq*H{Fk%Snq+o7iNgc{x=-1wSDHsW5wq_l`hX~qPANoFOm&(e9rLM@ z+LD}%6_c{ov3hr2sbA!7f5vk6x+5soS9acH0lUAgtqo_$8A4bIqNMCnK*nJxvSN<9 zs5PL~(%mzkt0{+@ipWTqXUFtxFrRUl@=$;EQ4^^J{?~Nl4TWdwebyR57WW(y5r?mL z0G6LU9lnX~oWe^D2Scn9apxFj5%ln*i&V1r(v(v<8&w6WrG`DB^lnlLGgl`YQ;Nl) zZ&3gyY8cW(!NN0~RT7oZ`f{4}FG$po>cN7xG^LcfU}_4|wdx#9iDtJ9?lB1#lO@9p z1_})sL9jYCsR#lJysSB$f3uanhZ~^h^^Ovi610_P)nvL%HzH^d! 
zD#cwDuM=_bEE}$1JAw&)F34qAg?Grj_5$doOLB(NjAV>Y=c;XuW(+VRuFh*Mtxjkw zQ?%y5hHhY_?6mfZL++Z&(*SzjR(bR!)XFtGTh~4yV|m>qf72qRIp28FuU_?V<^d zWU}d8Ac{oXoGwPsLAbD7!c6^ZXk2^^=OhH^B7&{;via0u*Man8l8(|EJdy)Q&eZ32 zva(G|4RJWZEIB>ya^=C-@1KxLadMWh50e6oELt^+qtFz`s6#)1Y$`i^0n)ujEy)Eb zrI%_Jy9JUE%#RjvGhk=kKDcSa(oa_RlG~3oBKW zo{>3)uOyDOm^*2o!F~NUCLnMd`d3HOzoNMYim#s^)?S#$wvX~h0H{-lnE)V+8St)P zS<#KX)qNy9LMv*BPgMk<&Ufd&44SeoV&lLOYZH?SuoujHUQNd+^#_Lpuu@07H;`1jOouYX7QJq{)w4~m*6D28aFg7Q{ zS1!4YkphjlUr)<^{9k7%+p2DA+R$A1LDSaWjjB#T)?BTUNR;eQ_NDcRImtUa5Gm;r ztrv^T{Y8ep|E{KyH6iZ`}+y?V*x@+A(lKn{x2R?W2jC*+)u3gjUoUI%)8p|f>cU--e!UwUf{)mY z#1bRwf6l0XRIPx^GI|LduXHV8)%G7S`6mnn=z9P(AvmX1N&a>`^~dAU9=uNa@@qiq zM}eA;sFy}YMlOZk*E8*=AMJJXnXlS#Ow=4V|M@;Yp9h zCpN<0O%nh-WfGCu2lE#r`!}8iF6|Lb9apv)M-^yV-4Hsanv7%s#%QbpwiD-b9FbNC z%4HVbo14af**6g&vcVvBKYWVi_v$Cyn&ofMG~W4-IyRX=t;5c@C@nAyA(tISlm?%^ zfdMH61@=?Vr*MV1!Q59eJ)@eHU&@@1fMKioF6@Fccxj-)T*V5xG!f(Jblkf7I&p#z zf1#%Zx5;8_D=WC^G7~-May@6UD?n16a+jlKih>El=kbtJLGx&l_OI>a{UQSSA|?(2 z6Qf7ex?kqMs8iQJG`H%Qp;ECd?QI@B0f$9aQx7rE=m%V%W-Z%}bo7DvOHoD`rcdRM zqT-@nY7=XhVeV}D8}tCP+_Xsws+UkvFB`?^QxDpsl0^rIa5xUdfqkC8DQXw-v^0v-vt0k-#ccR8-bt*Q=5b6PKHO#G+c#NPw=( z0#T8yz_R{|jm={6bDC&~g5Q}H>J>B;l-KvfNP$OZm49AV9Vl6HX|NyUM|#23V<0>f zl+W*!k%Ry#bb7{5kn&d*2k8puwf9=WA5-gWN)md?&ELOgMX1@PDt-^E0C#Dv2hrOZ zr>#0En6bK;bJV?Wxw?c1hG5{8(qmJr)QUZ>CdkZl-6yR3l{GZwKTIWOPXq0WUyD0C zU~#YVK~`7xf&g)PnpQn{vcw=jPcgI(zD+uTYbiX0uViO*?liGhRCl_JUh>C$B<(JKf?jf**jDl3J7=|%~dYwE;P7h zaFdWc4Zx-sdhCR8+$YGaXrJ%2z1W%1o(<76A>CbTd9j(#z_ZKHs8?VBwc#S4Mqs!X07Uul-^~JeOfNwbs7W)u zhANIwt1$MnKzl8oKfJT1`UuEMMJIADj*r_Em61_*?Ei*i<&9lM(}(}+fdBJ5-ABt( z2rzaMs`5=@Sh2{(l7&R6i83>-c4Bc>v=`5I=LT4)sJt2dSy5&uE41>;x;$S!vtX{w zEULhGE`wE9Pj84JgXDaFCII?Hp#IOmQ@c;7FSExvGa{A?azSNP=}-qdg_%=|R9X0} zrl1xUZOLg&I;JR1cket~l<|CRBcUbRf%?xtQ&on2R&+7_{scDZK|M0iuHU||$t88A zQoM?U!+`<}Ja38Yi?eF=jkCJKe71}G*yHqnl3_r@f!;FnKq|O=d)ct=MN|)7M=rQw&R53;7kq%z zEP}-Szu?$+x6J>4aI9ynBzD_XKBLZZo859gFf+;hu{p%>p@8SbqZz?}UW??9Nq}>; 
z^WpKim-c6mhB5hm*s*j01sG#>fD^zi_j*euM_>Fc$N20J1A89bO%j6U4v<~wg5MCl z;fG$K`rx@;09+RvkDYc-CA0ulJU5w5K}@ZcH~s|=__S>>MeyiSFJ#(F>LMzbfU`UM z;_LGcQeKkz9{}0hf|a6HWY?vOe%QJF{YUiXYk_jtxf)h-r!XRO`C4g0I{=wE2j^%` z(}X8WD@{LQv-U6oEFwCKdaW$-$ibfyQ7U5`HkS-q9z5EYYTh0J9OYsAX=jlii;ivCQkYYK^c!Y0ABW6P00YW<~fbpbr+nRs&oDIRYBQQiRdWfmS&n^P-xhXz!eBO zVXnqHc0g9G=()spD0R?U-f#jaHSaFU+S(e_-{OCY7+wOHh+83)Wt0Y{ZH{98(gtliH##DgyVa|8;^^(d5n_bK4tq#qdKB{ceo&SWK$9fdbW7Td& z_zBTlI~5wR`;`P2i3JUbS}*t{K-xE3P`5rHpwCgve(GGvd;e;bC%&F;GpGa2&ggts z)gcP#5(D~n-Wnojmp?G4tFe+E{sGr)DMqra^=<{*mH__LZ!A}t(_TzWO1OJpQ`#Vy z-F&7j;-l!oq;Qad#}+UtYwxBdCOR62^zA{_iqcJV4A4Y{WwCzjUYXY}k=Fod4X{XR zhs#E8GC-r1<}-A3LoiU|Yp;GsK}=|GwnNl9o=Rt2r3m`GhToxj9k52P`Bhg50B>tQ zC&IL*m2UD-kL)%F-&IC2n2mqZaJAMp{xSK8-R^YBE(g$xqvzSd2rUAwfwkMlU1nB9 zi{exSX{o7F<^3MQxL>niy<#aKgPT*CeX*?UH@vJ2Lx9*s{~)hQ zkoi9?K|rF@MEM7(!~DT))GM^(uqSCZDQQijXb_+2BE;GyY`gs;r)zaBr`dn`_4Hm~ zqan^yGpf1EV)r{WYvH~&f6TZ~FH^_%`q&qmfVR!U`PJ+U<9eY2x1)A3C06k1?u(!X zF30^(wRu+S+s$9v$EsjOX06hkRnMplDI{?$=W6QQZ|2ymt|Dh-3oUp``?Wo;+PiGj ztJ3yq@0J4w4oBOJUKh!}1$4nhR!dN}c~VOi1^Wxm8?>A!b_hTVsM`4sxQA$nG@4}# zCUZqy@CuvfW!H7G@0~fu0_VZj@EMmBpu5NBi zTU%sM?0wmp1?8X(YrMKaDMZd3GC3&pYObWp$`H8W~~Z zmZWY_GIXYCH)2TtfTw&ZN1a%H6)ha&MWi$)w7iie!1fRpC`Eq<|%8{&y02a z+xw*Ylg}YJ-S?N4U@)oYpui?b_?<8%7Qd#Zz5S4)wdO8p66Gcbj>^|yY>c*`WLr&1 z?3nIx25}uL!jXH*qnAhP@Brxt5E4{Oh3+4x0OPr04{XCU&0GM>A zs!|9n0IIgCE~Qp2emqvG&;uqBv1%4SFi~?|v<<~&)5g%6l`4tldbE@CLznP>{qn!_ zmy!%-6;z8;qL8p3nqbPAVqd7#RxOHD#lv=n4KuZhZ-p+tHLV`OD8hl|4H9SwZL$^s zjz9e8 z=KYa{n~DKMSY)!w?;{+N-2rqojx2gE@ZSgs1VmX00C<)|x10VaH2x7O&jf+vxpS0Y z{|dHVd8$2fdt`5v{u!D3H@#QHMIU$@b=`zXc$1c(Y`DG*N}j}{)qn$rD#Hi zeb*b<%~M{qK{8oRzY~P~XYFnSsMWasqdvv!!9(cd;}Se~+2H@$IrxaZVl4hL6;@D9 z4*y&0^=D{2FW>-zvgK-|_=~D8zTb9UysQi}2eWcpA>1_MEz8Q(?95 zk|k5owu4^r_WBq9kH-%h%!6ae5iWa+Y8nx)B;oWk+nm;3v9NDDcS-*@v^@EGunJ2K zHLO{Usl%Z8MgA^FW!$p_wA-TQfIAqX+p6{;dbUZ~ZB%OG!HbV&5$tLrUy>RAB{cl= z!ldH))8OSW!>+f#!^Mw~!t#9B+oXXilgcBOQ$pTVdhW_gA#JhpZebLZ2h46c9V6?c 
zJB(2&cPrvxl_3!6Ff9l1@DQ@)62`GxeueBH&IF80nI(Pet#^^r(7^aGfQjk}WSc2P zMJT|e1@sh+69jWM{OL-w5c8SJE>_~2hVQ}nOUc3?Uh0kR#_x*YBnbY3dE#x;E*B_`G!&FHx3M?wB^m&bAGdPmeBWzHvbLQ^@L80*)EtLaEQ zS&lYUEEFqnbM5mSh3<$hHnuvkt0^MwNs~NCNh}T>E6SA>>-lXv(WP67#pt@IS~_Nw zL#5sw5!bpoW$qArrE(!J$J(g;VPWMwOQVr}5~2N$v1HsiQ9K=>czV0x7ON7gO z`lf2}_NA5XK;osr7*%P)%u4f@iv=IeTE``S>Oz(tR=vtNA7NtT}^B=Wy767~a zWaD95ndB^o%Qx7sWv!JS+ssrVPpupcz<5GpV#P!Ho!@n=YI{C9axhpmy{fzGSD!3) z;{u=WdXor#1M%`A0cH)&nGZLkWjV0ZKzUSmvFY7py-Vfk6;DY?Nw3LdJFU~bSTY9z#J zL~KZ9vI{v|-qvzx<22Lqs8$zRA;W%v1!|RyMk5bp_nTw2Q98xD(etP?j-Ny=_T5ih zO~hV|`*|x2z-0cEaAy)Swo-l7s5~A(r&h*b<-56w6VI#<7kO9LOOs1^viQYAD0fsr z5H8WSA0-gqk=TwWC&OZ{W@$TDba6IiL-*>~;0?yhWpDE=N*>6VuD}(WZb}Xw<0accOoZM3)@Fr@!bqB5? zF^|lyp9EbsG~mlAsICgR&fL)cJhH_y!2Fx_fIScbvE*`9_<0OK3<*OeNOCMDaw#PI z=4{V(b@djloI=YfWT3F8bZ8^alJQOZ+b&%58yre#BNQNh&sKKaWf+xg@+7FE_KCod1Dj2@Wt!tbsVy#m_tZn`pw@e67# z>*RP1lk;nyc%1Z2cnZ>}*R=JTjm}|GN2S?r&}E=uDm2m01~bgn z?Lup|K?F(Pjp2vL`0n(5-|tP5Ms6VDkEfVz6nsNb49Hd!e+E+6>5U0bAvU}=>C!E& z$12dOSA$};XQnAKB>OGkCPzMQ^O62Vvk5UFKO91R0|UPQOlB+gRa=ZWOPL==a}r8c zqh-{EE`Oz$i+|z>HrUG|o?9V&?|-bP=Si$>o;{C;x5(hp>>7w?k3YhoqN=*X$i^^yn1aM8i@J zz$jejo1XkIPw{r$yP%*TL;}1W{k~^i@h{7p@Xh5|x{gX97XN84xB-EtC{ZqsnPzKuF(KHb9@ zqZGi`6A*VfLYdrsJ9{r>Zk|Un_DU#Lw>_a8s1!zDUz>TFvbrbQbY~k+YYcXyypupjL;H@B^f`82%|}P&H$qnuD&kJAPhONyezW3LtwLUM5#Bpr zZeig=zs0hiSUI;T4aPS%?Rvt aEK*g!#L*wE4shAV)klM-_$yYW1u8qikY?vS{| z54IL5!4_C034Pamg*7r`U^$rH_F7dQH-zhTBS0uVDlZM9?y-y#?1Mv*FOzHboHlzvajogN9X61V1EJaj6(@4N*|s)l?&D6UcxF4}AahmE|D>5fR*mGt zpQa`hP>1DOHRmTEP*+sTf)e^^0HmI6tgqqZ`pv~cHM>??xAZL>Fhzx-f={(XAMyC) zL@=k}xVxw7X1~};5$eczIA1=|&B0&QsHw^GTXJt_v_Yg|FHp0F0*rK0eVfe~uPYeQ zP$tbj-7(!4i&85gy4>R*aL4OEvk(!Ysmd zl4X;T@g>FBra3O0V-FI@_1A$Rc+|EdwN`)2#-M-#Of{)9iPU2mmB zaeUMFqM2Z+k<|7NOM81XUR4%FkU@~mxV)CGd%cbs70>-s=A_f z%{((FC+>40g>Cl~pEwrZ@O^nTB_#=ujaEY6z2Inm-47D0>$^P5?vsE4_`hfo77WkR zQ=}3%!wSZDS}Jk}uWZ`#PHqNf5niheaVHe1QcDWS?4t9hTcszgTgg>-B@gBjD-lQpV5n^;2> zLKVuIZ!adiJnqVY>iz{HiwaJ6gT8>=4w0hZ;s%8+x1_N@aruu*(GyIbEP-MVL6!>I 
zj?JC%x)utw=($*_7PinBeu@)M6{?XyG&L!#RT3fL2k|`R_PF}5SgNmmGjMTo;gRo3n_WkcYU`*jt(@_dV8j3!WqbYcMbul&UX8VWR#-x%*W z=quI2f`0h)iZWiUg;vNwL4)=AYzlWJ+_}>?C!vMfU$?CLeukyf2YFgS&;AsCL)R&& z^Uk7*%AP93m=&%->5ERt0ld0?F5WZ|3p-4nw2qliu57VeI~Z@BrK-QV?l!zg>)kzat8Y;#N4UIa%rtEe}Kd(S0zE z$b#eZKleNiUO1e{L0GhuW+BDP?MVpugGAPVI;^AEQ{m0xdjsO7vn@gRJFcS|2~=D{ zkFWJl<@P-0(uForwNbbj;IeSW4k}2{0lYc9uJTu?u!={Kb)4-G58`wX)GSr2Qcz(C z1_(ilIzX zggfFc<+GkL!F|DR>+GNkx}Y|hx&KE!?X_tm1Ygcj>{b@?CB(9uzd zv3+ROD-~;2BWdM7M!d;)IoDTuJ248My6P#kGoC$JWgX5~PN(@+XQO}a4ILdXl|0W+ zZEfS#m3BT_PO3Zhc4?o}p!TYT@-`2{HQnP{mm-SH+XYwCuxA?ycmb6}+n=#{X9Hp+ zPRw}j&S?|g&>WFu!atqK4tg0=ZGnMBe1jZgikRo8n2utus|}a=cOni!e~OvXa2sTG z5NE03pnt)o6R>BvnqQ!gQRvp*$rU!h0Bz<5KXgK0^+Zo3q=hW%7WT{YByV7<4IIFE zz5~?yxp(mU^~%sT%n~d6)#xF>utBEvd*fbiE{f*^jfcmm+U@O)4Ns7H7?39BoYcwW z?0cjpNa^UXka@Z9eIcCFgr{3`=mUIQFTM=$F5qm>Rc0*r7hm8it&kGld_}JT63G6` z-GIX0f&yaapyG(`Bao1fKV1D)Di+S&vcu^fp5KUzPz()AX=9UFyo-YZzRIeT2yOeO zJbU@O4|#~n(zAif!?Mj+*KQ_G$9p`{<$32VZ2BL#Iktg?8S+*a>5p2~p}qwkX2`=@wj1q$WQ>wBUlm zZ%#WB92bmq}(&LIS?1dL9zDBZHB`vELn8bu^XFQT(N5^=)q5`7MaJkrQx=UWnc zc|;z_$V4HSrSlZp5G0`Dvg=U0^lcVI`Qo6S2Q%sRz!u?Q?jSUN7OJvydPw=<8qK20 z*)E}db$iS#lwS8d1bNPfkN0iyG;kMO(E7O?->79~=r*?qUk>;XH&gG}K1bpp%uF1{ zrhiydOsnrd>v|EgJTlPGb_??|sKno%3hBpm4jCQVOY@}p`Zh%;>Un2Nyz{*twp`o| z`kE*8;6&fYM$>@8hUWdbwS`($N=fp;i!O?@j&bvuqxHslMASXc6HG{h5cH-ceLXZV z%&DC@JgMoy?UoXY?)pWPmJO<_J2CIw0(OV*^_>QdExuF7R%hQ_$bn%md@%#x{ia~Y z83H~S3GC}_e;!YK*mrs$2il2?1J|6_y_Sp%4XCdsa$cJ-P8qG4UXKVdt#r70&^O;I z9Po{Df5~9v{7!{{Ef)qmk;uoBCv6FJWeZ|^uD5j!O1r1UrqJ6vN7KL6n7>c&53RC! 
z4k`iV72 zN&^pdM}zepI+cr+M&!v4FXRTD`y-uQnH)V1Q8=n4Uib5mAr^gG2+h&%@d3gxHUv{)* zW-K}?hvb#QjUg$@^4xAh$FqNW(@)6unx><)pWtib?UhXD0DV)AwvxnRdZ-ilSSYOl zUc`PaD45Q1YS+bvSi@nf{c_I$JMK=C@%97IyBG^;jsY~Kb$8=CRg20Sy3MKWgRI5G zcN;vaFYzrEGD_*b#;v& zDyu1GH=p-HjAIkC%5qKHOYE%|0mxu*iVXdgI$v~R^dX!@)xO#n2@_RHE$6Ljlt^qX zE8aw~F86fj+i^^4kLJBJ%l5mKeRvkj;UvMEcH9=T?K{FlnPLZ}i{ZCa!(~$DBf<3s zPVQ>tIBU6x(#eNS^R$@!h-2xMcMdk!Qj+Q};`=_$cVO9DQ~=JwP>2 zRGwJ2Qkw3nEDNSVhE@1N(Qg31$r{Uf>+$71aF$wjy!Cx~e3(^=O(Mmbj{V;C- zS!`MN_U4PNdPNG3BZ5q`Z;H>ltzi;Q9o?r0(tTk${W=iY?U;wXmke&eh{K~dbSX> z^rEsCml=_(w|da=H5lLJg6DGoW{YEIm*kuM4usgF2`CDgVNfKwovXS@r8HkVuU|z0 z+!y39>B0T$yJ!7P^5IF$kl>)8_V2d+ZC4wJf@cT7)Y=KEC`;nT8jcN`)z6~%hv6hNo$lxmSqEPR9@HsvAxV#&? z>{-x4ms7c$q*lrk@F^=pnV4NHzD{!R7h>Xlj;#s!pzK9ykAECS55c9meeSWF~K1aj{D+X9R95eFgWrFuHOtfFU z3sJJ*)o({263WlJXskg$QHrR|F!D7}(hPbg_KYIIZQ|`nO%44(z;ltZ7R8bZ#Sx!U z+hW$z-Xgr6;t`$ZDI1=H0DF}YboR22(gPzrC0U+v&bj*;3?YvE%`wK*X|s`QDOT1T z6h#=kTO%w|4?Bvol9gEtBs=lLR@g!st@A|)t%YlG^S2Lvn*@1WK^4aihYLpX2EESz zFt+?@Xq^KCkY~tzroGKuc6b&w7M1vTwC8t@_kOHq6Dxg{^Ncj9%G&lYB6DEUf`)al zAi=o9^0r})daEQ_&zyN>2%<@bIs|aMl6aNX|IL8W>cgCDt^J-CB0ihWdLBYNr&BVe z5QWSYdX3`&d0~OCl`xJ}B#kPTG2XFrSXHn4@z#c6jDKlnl3O#k(#utR8@J}I+`>$~ zUh_(5Y{i&~3N!D4PBYQyjgw;}oRe{{*bX*)@2)Ja1A*dY*unwDiP?=rXL0CHF3g~# zyeZpWq920d;!MDl4;x!w@lVpXZw$%n%JGSLrd~AS;)FchMPoCCsgac;J6TUHLYHdf zXKrQF3A)~cEXt@@ZtR0v3O-fd)~g;}Z`>U2nwtk*e*w4SSkx}DKq>=Hj;d7jA1u$^ zdo|@`#ft7}l=?ph6gp%rUljDnBS?ITfm;wZ^B+`g+ipr^?5JSF)8Sxz3Sry^)cTcJ`UzzBu-l6~xB({~v2_9aLAAhVKRm z5`w$K!8O5MLU4C?cXtAW;0F&D+}#Q81b5fq?g{R2H$Brc)7`(iQg#2^RS8G-K5MN{ z-uL-55A$o~KJKivqAfNrwYO{}640#=YmrkL^hF-eT;&}*+X#9&pf1*VYsD@Om5L}f z4oSL>X0c&mR>b!GnWZ#9gPK+$N?73WzKO>+Z6>G>sv(Iux52NpsBCYeNzADw0Q14! 
zaQZG!uO>6}x<#;c!owF>s_Z{_Oaq#M!Lq8$316kdoSK^9ZFy)ucRT>!YJ-BxIHTyfEU_+#A14_eEtxBm7_ixtx>K0PsRc2^%q?gTR z_}Dr3s}knozG~G7%1Say`qwoqRGZM%{hGI6EweaEzLS$I=H;k(fMKAe4N6C_j>xf{ z_RBpPPoCD)E>P|}a5>} zu3p1_QQvr?het@-zD!Z6U_V)AtZnG6T*ZFB`|0fgVMJPW@3|OiY)O4QiR5A-3c(T& z%sU?(AAAs@8bdYh7csDjOGJ(L?^>OmI%dpkF$U_ZLC04J*<@wEF+5K#fxl#)uNc$s$s-sJ(qX?X{8yJqR)uE(mh?i z8k96lp;xtA1)(wM*rpzb3-$@q?btaPZOdx>)*dp~tc6$xap9s9Wl%HHOgP4#hX~f; zc!7_XG<;&=PjXrt)RsK@bVS{+2Vyh2H+sVSrB%F57#rNblL+7OZ&8$4$Cl?v> z4vb7tjd>%1R5cay=aQ~?(YCggiw~y&l)TdH`UH;{XKwDBOjbtX(*sA##SfOCl#_8% zGnH!H0|J5P4Xbc`Ew_mZtWmLE0Y5;7fG3sk6rQ@|IQPTFP+|I)3KaAshVjfa>?v&% z>%J;wRrT@^ad|5qFkqf}(4`p{bI7vM8`#q#JXKoDG+SZJknlqGS4ZE=0hVa^*SD{O zCIpu?<&GS_RLQc)e~7AW$8B0;|-_@C#Uo7PAG%rnT^~qhRHw6^35#Q!< zN_CZR&LHOn6Tr(}2+GDM5tyK{bqZ)J7Fq2J$ z6dRqdy7)NP?n*jCJ>9?^@3bVhRL*N(-&tB_MBtTDOJ+tM9rsm6x^~~cS&t2|I%)Y7 zw=L04d)jr`*m`y`h0xCX+I}7sWB9HsgddCdlkQu_qivy7-n_z!+(Y62V?*=g{#t-A z7I1$84NZAJ2!pUTNkHoujv;Ewy)>QO{<= zn=^2=NWoA|C%|Og)sL2zbUDN>KWEm&3odwYE$KOLbNO*7+Hd)vYIZK$h<(p+o!gZo zu1Z=K>+OQ=+8%JQQ+>-P++>W*PDX@0J(HQRZS!wm6Q93WC#P4(clYp#2c1XC{fQ`W0;!UwfISG-CQ-eg0=^?q8_*%9{ zdt6vQQ(_*;ex{2uL(Nz?ty}9%6&84W`qf9XRGbJwZ&?8;W2AN?UP+|V%>*Sk*D%Z> zTGqCj&XY>DM&8eSff{eLQr4|qTOmFCSg^FHN|bp!(x~cY6FhWgUSIN1v2GrPGwrLG z`EirW;Lj$FD%R^ea0R{au{^&K^n|EHl$i8?o#d3*ap8D*<4Nwu8xxoN26*qW!iC*?W)4*l(;BU@lV|<_RW+hEF%! 
z;uZ^vLSYr%!$~X@tZuh5=0J=2OdyPTejA48e|pTCHR1{O+t?E3zgf4oj}v>I^$Y;U zO0T^i`++R8{Td0pQk&M@1Chpu6FNK^`5c&ZXGcBU66JIxGaMt>v3k9BB_{LV`dddo zsjN6n2V*-26E@cFhW}kE6D))4XC@Fk!!=b&X;`z=2gxL>NflD&L{68DP1kC1gyZtP zNz^{Wzc_JXW|wfD!nbYc{Ow+Bo9gwewV-KgWJ6oOeazz=OQ;@aqD=NntZ0J{1%wk4 z2jBTp7>@m&1?dPe;d8FAkM~;uY@7_ookFj(7}!T8%UnkXubj$cRYxknEAFf*)5JG0 zX)+Y?$pz-~FA7@wlJ+(*j~#I!{3bEhggi`I*VvF6j?Lvm*t^HqV*)Jj@SsoEzs6YV z!0>gMc$g*bQ)?150}NZ&tLnTw+Y(H|7#rtf?Y(nT?JW1@wnxOfBYkJvbWBEfyFn_K z%f7xS%wWwJv4}Q=*z~yC+G=(+V#wjqZ(#!b?4-H6PJHqus|e2vVTs+37k<7+Le_D$ zOHj;vU!3v3BJxN@aBFgCJ1=HZ8hj4~y#i86M(KFCR=?cG2p$ z1+n~-H_A+P>Iz{wZ)P5{9qArFaj72WXmWQ^=xA|^K*ICcyU)Nl)~02|O*iBOWgAC?JRpmjZeF@8fP79xUYpp1d2Fp~=;hyGK_sBOZZCV-{~|XV z^{KrlsxKcKRNtMdEdJ~Vg=zVG(|6c#IQkP2qG;PJ-2?0C{TcTX`vKNlJXV~Se*Seh zBBDQ^!$z!ngFQss&Bpk~wP42G?G?|jbrTIe#R<0S7%`%r*|ML%C&Y+1mfZ?w2H7|i zW~m>}GwcSqKQjb{je{|V97ef53ab-!U@RDG*-7SzMgZKC)+#+)^>geOE9EfTpq@MM zdRy(Xpr#SAUD-4yKu;4MAH9;~#T>!DEXN7a$knJ0sPbib%G!D34@0!JX!O@Cb^+;1 zrtkC9%~{yHHL(4_)H=fHT>BI5`R*9U;x%U$tmE~r8iiS z`&D~rv{=nSH%%WXs|E!ww3pV3!b(#BcJAwj8PxyGGK0opuyKQzI5BOx22(uU)D=oSuk$~}d~Cb0vyE0oq-z#J7BQ;1Dp8i@P<2T?Z+ zk5?8!qXLfrhx$fa#=|Csti9;RrOxtv6I%rTZ{!A$8e0P6j3S-6it0h}!CWmpsP46^ zUD*O*W0Q}T&Tko)g0A$bSG=1(b7)PBO8)mp12oSZ8ML!Q=duw!@w?<3fymcE(A)4o z7f@5*&zgWKm{J4J;hKWV=Z0ee5pU7kT(Re9+M%b<1*<>(Z_FFpL+7VoiD1UslE%_| z1)Rj_OVAFNNLq5KkP^!l&h^!r_&k;r2bI@XJ1=O`R5G>z`XJS+&z{B=8;i3??ftt#A+M}Nr&A!B3-;dD`%3@%!?2dV0$Y+gD1 zu)sRLzph%OS3x$=u#;D_DF3!Z95GQj_2jF0!wdyx!@6)`9Z&L{>T(OxwKmWiTqxGW zXh&b!P@FgEER@1B`3Q+KkOT5~Pdh80qLMxOZzajV9fV&-Y}&hx8HSC1gy4pY-Z1qt znPfI6m>}eK$UA{FKu~mX{1e%({V`!;8VGq>8@LiB`!N_=_#h5!+ChxC0Uf2b2$*CHe~1N1dmQmjkN^~g*E64hAB z1THmo{&k7#SI3LB`1x&5nwgpr83*U7$;se8 zDrwdOnC=xmqrQQ~u&C34K4M%e!IDGqxz8{=nU}0!U^>;h_vuWT>-}t*`&kjyuq`Yy z_tfxQ+ycEHZjb~q-Ur6DggBDf>42L6yj@MDRo*|_od0QnsGyoFlWP8{HvxClbSmZg zoMLfmPxNXkRH&bTjmv(3Q-w|k5x=c%eFsqW#pP4fDO^riy%EiG^I->p0(@q(KdDfS z*~Z6?t2y>E(tT&=rH-z6PP!M_N)U&NR9lWFSMj_IOpJ$_f2s`gKK%A46;V)#5v|O2 
zxL#H|d)jYJ}6;qmT=9zLYU!ek!lBMk9TB8d=#R+?=CJ~lQsZj8gsGW&cF zM}tq#i^wF4bmm24@^IZwjF&Jf@Z``UJg5~~zw#-^(1qwP4b&HZ1SpGTA zr6v7(%;>;{iBL4M%`8mA%iqOUUph@lE);d-sRnRRtbMDE2k^FbgM$oww&N3lPib^o z481Xtt5v{ze}v)G0RaU&I7`(mpN7IlNBm*O_xT>9;$hp)ex9)RPd$m5qqWg#p94XI1+C9MW%v3bfA0tvBKV0e zw~2sIx=dp2dq0vnxF%zwvM=x`gSIq{jpU;Wpmi7YJ!~S0joNC%wrhCJZB?+<3>J)4 zR4jFNoVj~sW=x?W-q>OsHQ{Odu%2-zh&Gkcezj{josOTUMc{ynvFn;CDcAPr+~?0T zslLWinxgDD)=Iv*?pgrqm9JtZs?MIzKXWK`p`8x>3fcHE4m*m-7$HN=S`;%dfDkGCqW`biuAGLD{P2@=>P1X_wuM$#O;Y+!(~SB?A4ftr zS#2Qit~XHqyJGUcE(dRv5?JV2uhzh^UbtC>fkV?!&RQk`wF?aCi%v)Li|FM?yZFOF zE<~XkN-b@N9yX5N4fK*!u~`OB5-Ds2W$Vs1H2rnmPy^);E(ck*41i0t58to9-#iWj zo5(J4y+it9}n6ML$vtX2aY2A*ASd&o*f6_6;dp-HXG2qfj<>0*L9d+Oo?WaEaj)O zv(MT)MO{V^wewb_RZ0#@0!uNEF%HFknhR*3J(ts?X5l8Wi`y?p3!~zF@rUihg=pF# zGSlOHmk6Cq1CjFd3ML9WtK|V@&^;#0+X_Hi>kCKFzQh`3F`ZdwD z3J+1+Sy!8=n*}Nu;Y?mV+H2k$nj`!#3Y0aa%T#0|hc`;v?f!GCBtGp8pOcpfG5DDm z&hE24Hr6hXSLjBx@{@|}J=O<5Hq)r7VwBgR^g01u@ha$F#IU~fg`P_^W_LKBUI%lY?#t{w}Ek~D&6GNl)?8!V1SOOd&^2?tNC)x zl|}zjGj>mph-oW;-Tbj{|E({1U^yeKrKKgh8Vp-hrl=sV9`fbO1GoV1vcX#u?`n`O zAt8?SfR~f=ulKH_>SLxtU}4irT_ns29**DBzG`1h!|A?*FJywkQ zzT>8~88kPgCX;UpK7gaE0TRrmK=} zSNcOjPNtV-Od1#&NQZCrhTyN`yu{aV$N-fstE4FL!p|rJ!sAL$gms4BV7LqX@;dfl zbj$0S8+u3U;4Z9C3@=>Kiz!HzmzS(qW z*8z1c5YXJWf;(yR{8X?BklcEMi9dY?Qs$u4?>`B}#idbVxM7-If4$-AgZk%B+VMr` z9SdT>nHYj-9z$sFVPvf1znRASZyk&=sjvsaw+hw=wdkb|7Ne^DXB{lBRCIMoxAkr< zPD!q_eZ_O?VMPZO73ZIh3sRxlNuUn4`Q`jwM<}1yMxsi|u&ZsAGr#+@chGpt#vt8@ z<4BFP1XJYqE0e&uxjVt^cWHpCdGBZaJJ+Knzwm|c(qY!kSH})3mFG-6MwEk+p?1MozNqH840xWL6(*8$0Vp5O+x=@Au#kZGuAB zDvX{2Zd0G9gUdUfja0S@lx2q50V&2O#Kjt0qw0NMP2Y=yWds0U__6OIa=y(7==WWx z?7GNoaaTnjG_KV+56Bsmr2&YmLA)`@w|Ji@d&3(E+Z(qO3y17%re0$|Y3_KytAg=M z!9OHo(?y3=%ffd;;o)&SPu_>O9lIWl3<4zSl!&wDp08dLJ=jLa$!zNNu82OqBgFRW z74MjX33$!l+qQ4Yl81?}RQm9hH#tI!Mz*Z*`VP_*82&()1MxEwS>-(jG6y$zZ{Vkj zhTpVSO2sp&;)C2T_ISR?vHT7;s^8@I0wf-p?qzdlq=(tqAL965k}B&*C|@BT^%EbOO?4lvcyHv zFu~{h65k$xvo+k-1^{M{R%+1~xE4f{G{xn3E?GZaZeyFK$>4Js9!$?g7}eH9qLOmC 
z99Z>ox`l{)Z)PZ+j&SJV?_x7*hFFPeLnW$*La8bd{<@rdHb~WLc4Yv}w?b|BuuHbL z%zqhI|8jz5M!xt8G>!eSGFIX39B7WJ^`|K|l%&(M#an+j4y|D%j$?%~&Bv+)Tv@;? zo`}i1wuPv181{e;P!d<%QA=tizMGDA{9aeX5n|=$7G2W2Uq7y2Wlr{N!ON;_6 z{Vgse7mK00Y%YkiDu7YDp`)~gg9ZIvt1{y!sK7AcRG6>jIkiJ~%U8#XMpHWVdN;EjvOFDf&?d;3rcj1H8_WIwnE2{y@EnL;yTg9XewbHZ>r?w9 z@qIOnt-v8UE0ymiF1A~)W8#${vvW6ebYzWcCgE4SWA+u`6?oiC7_#jti1u2CH|$)C zW|VZ(@;Ln1G<{CyvR@moW4x!cK2;>g+L)hL|K2wC&U7;~SF4}*X02-v$R7c|Vo+MM ztA1#R2IfL1&AcPckadJ{%w?fN)oG9li$FU`gJ<3+d#}x0&3w9$WyuKj|7iODyf6T4 zlqd|Y_X;~#q?BCnvIU$6V3WyRR5!T^Emwz+wN0QZ8#N}P$AM7qJfeO_1*UI{OgfO0 z-2dycDA1&X&G5kDmKsmfs8*3ADO;^1Dk>_AV9WJ0udi}&2zsXVn=4w~k$j`mi*Z2otq@ZSIsP?pOt5Z9)| z7M3>))xTcIGkhI511GeFnze)8X_3g5b64GHD8B{n0}}|ihD^u;!!T+VvY`@-T1Fu@t0?n14DnOhBKXrrF*eTfaXTsSgqQtEnn+ixDuQHE@KM4Rd^0AE?> zCVF~whY$sI0*S1^(+!j?_x=`GrHknK*v=Q2VX+bjk52i6(l7GL^rR<_MlRTT6|Ig* z?(dOnhXG3EOMAdliFNKN2{%XUC6`+?h&Tvd5jt4RU7|=n-_MHS!%WyDzi_^;&?r+6?vn}rx8uTw z8+}9iFS$S_4CMYkq44*(cUm9bJYxCGyk%z-4(ma&8l#cCRa?mW7JZ+J!cN)6R zk*8#%2Z1s<)8Ls&D$q1=L??~}mVyLSrSvt5G-*^+RErb|->^!Si(RpnePjJCucN3@ zM7`T3O|h||m3XTB2@4Brwn(O@=yvN4@@K6wZ{KFV7zr~o9z{=}jGbXBFwo_H+#ms@ znK1R)*^d5tIZZpob?2ct%L%{woWhnGa-rUHVum<*8XIAI45%_v5FsoMU8SN)tGU_1 zOL(e_dE-o`!~T801T5WhFY&K#IVrh_v2C=Dw5}#n%lFwR*(%kV2-gmZthj2)mj3>l z4_^+0$id?%1ib%Upntk1K4wk?7JgM^!h7#(DXi^HTC(tdOZe|%`_Iq^f-heEqAp&I ze^{!3fcAxq7XB%w6c(@NrCMB4^BhdFU@kX(M0>`B~43t)5B+4}{$GSb)_71Y(A9HG{ThUty0NYKPKg zg=hT;sapY?UX2En6fz4*uGe<9FWpI71ZK&kT}1$gdo*<9-D4FNc9&jU?dNV-VjR`+PFLY{Zh?eR&nXSOcNdI0hU+#J!PDd6-ZQV~u(ubR-#MOqGNoJm2TZjqJ_6 zVvi!=?)A|^{2%@_4sLq)v5Ij{gj5q-vs4PQ57n>mTfiBcO5X(Axgzk0BW)m?WR?H= ze$#N%bASCKh~O@YlZN!oJ@CKYiyNY~V88WP9Q{|6m}K-Z@H)wOz(yu8yWIp-0ffR_ zB-Y$PBoicmv?|uVk;xNZ*WK-HeuEl90Ln?`SI~GjE25-}N6&`t5dV1cEpV)NR}4WT zA}a@$UZ)6V?QMUZ&HTq=Vx@zOutY7B4ba@qok7#Ju>1*AN5oO`E+O{sD0#np$xH`^ z>}*OzCeLWu@S!j0nhOrL9I)IJHwOOL_XxZ<4q^Cu)|FYx8~M;uN;zdM6U>Xs&n=wq z{+=NfO24eMHxk}1;?Se}sQyGnuFhBxid5n9xfCdfG=^a^Rd3 zFXL&+obF-zMzu_H!Q5%>?|}F|EI&~lGCT@qcVjz&AeS1l6*V}$4-iWpx70&)pYPW1 
zmUM3hTYfh`{!_7|E&M3sd$fG5|9IyDfN}8c0r`c|NtDxR$0M@JU7ZTOY&!32r`_QX z1_lcK9?c;;eMH^_&rjE0IBW-4OZHvFl|IYd;Y3be;(SE^yGy@Zcbo#x`~Lw!uR<-8Al38D)8%Z+j@vOfq@z$v%xJOZ zdd<0UJf|tFqu73~c zXPK4S(PAxvp3g1rAIYf=`?~jnebpU*+hAG?CDn8*)`^o(-hNLr2 zofi9FGk!EJTIXS0Zk%IJ5;2lYnB*Nb&FVY?Y#@Kse+rfH~TKJShct6WDcwLhIw z;PBlcofG^6)AHiDw?xInC*}PEK>(T-j>W>ngPb;XBB5l5K?=s!@H!*6k0G2@)Ictt zm8^F^2F|J-=S;O0*j+d3E?BD>Ydn4vvtM0rP&EK=ROtU?%aBz7$mD3%w;vK`Ph#jo z+eYc@w9RsUnSo9!Kx_mArYIaMX1|x5#sbvY0oNE1sfIW2+LCp|%q~?8{FFFaW;z$s7eKGwp(sM z;f4uBULfl|Qu(CoGBXo|$x+FEd05f@<8p2_pr!_Cr)gX!g?;@cW3Ksc#vEMgAOFt( zA2{m+@t3Vmgx&n~Htjc7;txgyQ5_I}&lU|>c{WJ0lDMEDpiVrzGrbg4BPSK@DBxIq zF4EG}W4IgmHza`p+yu}^c1S!9u3z;TGCw%A3|JZLH7Jo`k1hh31U(L9AhMX1ET|Jy zrlgb0(C?iTTeg709z#^Ml7rlWXUH4* zM^QUiip1V-dMrrX&&x=^QQ>3w0Mj4!h4TL&xCO760d4_2iE|2EkF#?^ZFGu=@N3Wp zfXE8tlzOAgCJ>uzJu|o{7Pg#{6#Cf6l@B))=@XYJ#^Ro``q>)zAU#W6iF<}9Z=C?ps zz>WJBn9kmXrlD0qj$sP%{CV;qzsE@?K=9*p#~W=|eeI{UHz?~=@X>-wWGD@Cub(o0 z%AX^I58WGmMBU{CLX6;-DzSN$a2JEqJxXjUbb->po6F4w|2I^F#58#V!w&kZ#b_#% zT8X9|CI1U^`SejVXR&1+D2vvHj{N*O_StBdI*kc$K9{oH={Y?F<&smU-AF=Cnd}3Y zg19)D)tZl>po2YA$W`x9ty(Emw;r-vFgZPGmW8 znbcx~MGmU8!~(P;Q07#iNA>&eR1GEaK<-xw4JqIbA_JM+=TOH%zy9%tm3wj*3pOJo z&aD7;(jWjDm+mXyr!c8OD6`W6-_PsBa5I2er^RiM5o!s?u`Hxs-T-|M@-q={f{#epxU^?s(XzjxZ9C%ci0Lif}^ zS>oD>KN%YQ3RipX(0qiL9QO?xan0#JdAsYrox`lgF3zpJ9Uen{Yr_Om`@^h7#HXD&C(b1oO+TxQjwhE}=XQ}FZ=5Yq7!V}^hbGrAz z_ERO*+OJDB<%8ei!Yupbn~klwugksra1BhR@=Jz;_AxGc#qb=XqFE$y{2ZuiX^9nZ z;qY7Vbwy;65$uiuyZ#>qUc4m1wKj~RYZ?tS0puH z2j93|^@&{eQdM3urOXe1V6pnGI)i_van|-DG)twghXzhlsv<>%BSekak1^1>%t=_Q zirLH*W1{I7yrlmhe0&-MndP73)pUE8(PFM<<_71)$^nef7(Wk%JsvQ-B0zL?hER+qk$wVg2w74ae{-8 zax7ubVi!42H#ZFr{U#RAG9x?vLPE$bml@Q_=E@+#3;jM-8d5p1l7&X9<|F%Klg3Xi zySg0HYVVufu^|@&ax(s8XQzKhQ(y7uOzrT#^I&=g`>h{-lbor|H3VXK!Osly>BYq( zLOgTyn{X6tvBwP2(+`Qsf*uWWWhwMn!%!QO_$>nF4R`B*lL0L~4x+FLjk16{{ciXqXO@>*2rW`mpT$Vi*txQ+2d0dH23@foD{0 zXEC$`wwjSZ!DiXkDzC$LVyw$eOT@8XwQ!L~gkV^^jl?A$2?&-D_{KE?mWZ9MOz?tR 
z1Yin($3lyf5cu6S2m~GS$OEIpKh%xU>pjf@COVBbZ(6TMB6g#ps(N`B(e!2zj5XIJU6Kt~UR3s` zkKW-O|0G7pg;(xl<>RSXChOkn$OKlFq*~wmr(-Ef_oK}y-U?u>RAv!+;YITXFc$uG zmN#su!U}Q9ogVD27-S#EOJ&xx;$nc$Nvn-`jKOj9D^karvcvgbN)}=tgo~=Yr26ag z1cd!h>PdIqfr=+UxZBFG5U(B(7*}5~rCB`mANuA0F>(f=wq1URlk;JH&40Q2U1ZdX zuszOfD0v%dqquyxM3Or}nL#a2MMeLQ7Bwpyl^+eY8-N)7qB&E8GW%!2DCkQ&J26jej9 z|I)-KAK!v2l((j{st(@T&B`m~a|NTw#23yp`Bab!^PL$DShW~S+Kt+t8L zO6T<^Obke>I;D<#4Cu-S4{}(Gq`#C_{6r~HoZHWS>QL`FL7d$`Vy)~!Z>GC9eE%Mb z5}|vGxUnmTu7KC1`fPu1%=40Bn!`axFbZFSRKklZr*lyqE(6J~Ss8vs{{ntXh(aWL z90^Rix#~h=7^vLJx=z|v>KoxdBxV)gCS0zDZXD}lqgu|1Izs&(5VC6*PPmf5*Ew%R zmlbYk4T%w-6k#v$@GW#F#q29}PyfRbHBJtP!%_0lt+YBJEynAr?6(M*ZJO@qEa8W< z(C88TPkh>5v!s?bq8@S|R42&xl4V|Yx3xkvfVFW^s7uNNfC>PkdiW|Yrl>@`zR|Lz zlsx77ID1&iv7s1r!jM(sT;P;Rj~n=b(uypy{*+d@NAm#ldfCw8yI2b`e^^K-d&8{WbZK@D?Rz3rmLR-w**$BT6jLbJH6iNC>AxcH1A%E zd&~`)p~7U{94Tq>Oi801jOWXV59}oX?r@?vcdSYRs6y#8{eTVBl?FGgQ-b4JBz)ev zpmsJ2b?=yl5oY|%sZ{x7FNYHKlhF{Duj`j6T?QR~-( zi6xOmcM%Ew#9oRM;{zr;u@T?t9JzT?yzbGF57ICz}I7+LoY^uXn@Jp$|-t3Ra z(NlJ#p=Q1wpVH;s-UP_10GJ!x$8TcWd7kPs^M33PFJ(61rz=Rh*su30n99#6UNBc% zJr_>4Ib>w-8; zpg}MjO;OGpmlaYz@~NzwB`Ay%Yi{G*$O5-g8D2RH27o2HV@#P1!O@M%xNX$$GhRql z;|g84tr81fG1l{TeBY=Zm0eoogQTLI?*>_ZkS#*iJ96F(%oCaaM%B4miYmSPWuCCF6H@)(Xpp695v$ZI+yC_ zQ|_00jSs1^OdL^QdEk3^y1tcw^ibrB!5sF$_4}k+O%rKgjnZp@0~~>8_4b4b5C1@|gRw+_ii7 z*eXK29{8ncFlm&k&Kq~eiuSosQv68%3c7Es>8V?Sx@qZ-*kZSNpRH$?>wQU4X^$NJ zH9UPcHPoZ|%3yP>Bzl^G@EAmPT2csibA)6~>tvL~d%3FTkm=ut6^41torbeR|Hu50 zOdoiHfyW7y2#^pkGz1m(b|HcJQ(GRiTn1{Gd)24AZC$e%iZ^p%Yu4x{JsE0Po40|l zsh#f~EVZas2(f}=(sbNyJV7AMyc7zDtV=lwYklZvO!@sV3obv}Ge z$McA8wv1?XcEUUhcX@i+Z|SR7oJ}-BkEo8ikX1wHQB#pLA(Y?FajvepzPM9>YOkhS zVFs)!+lpOrqd-FQHrPJyes(g=M7^=(ZBmbJxBy%4cp*P((Zm^ojeT(!ob<)h0qAHD zy_2SXW+6qgax0fwdweC7zuMLL_=Nd}a5Aw#qNtE=%oB(x-|U4<1+Q#aC7`_!mw+q)5ub34jjM~Q&Tm=^(NMFRw=wAi0+qP| zqlEvSUAu^}F*J8cX53}gQ#NR8XeH|w#H(VD^|}>IQDG8zHWJ<0TvR|5PS>u~BiR(gAff2SBBD z(eQNVaY}A~0e6I*xMXLtI*y4p$uwy6A(bG5Mj)3Ps~_gXr^=<6r#u%5B+L;?G7>+g 
zh?)k-i$!!?!?v;KTni9>rp8pyghAbtXB0rz8NRtYRz1at`Xd;ZK$BlDLhW?%_(I7y z`@8;*W&k8qQ#kHV?=UBmXrrS!#63aiMPZzxF5yxyzdR@+vOmg8qvTiYJs_6XeQ!iT zPXBT)bHDU0M%_A-qC1#GNR&#ml0d@SZgHnSCF`9uY=D?>$z^hjBTiF-Y%Bbuu&Gr6 zx&ugBC1hvFl~Duh=ks?rqTdQC3&hVdausp%kXiMg6%&hDdDJ_C)b$kFcr8#t@ZLB5u-&H$zv^^ootz-l8h(CNdlb&hKIebm(Nz*D1+Tr zXt4YG?-~ct0yZ(wO>$frxpm#6c7`NuYd(4Or=y@ivSqxY!a7suV(S$BQ952+RItgl zRo#5>4Y6#~pQ+gBy=>}$7!t$sbBsl_OTGSJKeka$H-WMQ_|llxEo(sDN}{me}x`l19B1PAM`nZYZ84KV8=NXY&v zh7vYrW1xL$W~A|tPdW+m@?f$pE=L;H;&ID0*jHx$asPUG6=-#{nkjs`=+Y2c6Xk;H z@N*Y?N*xXjuJ-x3P^%3{d$nK^;cpg*3MJ^?h75&cNzQX-8}k-#9P~MKiH4%A%KOFg zX++@Q{E{UFF(#7lM*2PA{WN7s66wDki=)~=Qn`-vDT0;cz=cp)it;>%+U8TGsCd^0 zejSt%nF8K;qowrf$s8X)IxZhSoEwr8@cHGykS%8v@zkJM*efE_2noVzCqoMf3C#qW znVHQnz6u2AE0Wv}ON2M}vt8p*kiH=cWQo#|=>O5UEr^u=lng#?{o&x0LNX|HBI;R= zKpcYxBo)qETMCmPB&W<^Pug1r)Hk-W_T(=gTI#EyO>|Uti0)SbnMQL``*nEEy`8=A ziI5T+nu(0mTk+g}%2qyNLTAd0<3&P-lNQ3Ad>MyN`;OK(Mr3GCjuYH;$lkWNkvkR^ zaC|Orka(7jV>SjwYzr74+$dS#{1!#mZi|#=Xat^@zO!9VKhpR=yo%W5J|FY&;_OG? zitnL;`@RRrH3~2$=oPtr=k2k)`d`}Cg2Vo;a!qr{D4 z4D5Fay9RVn2xqeQ_gSh5$G^?vkV!~5hy9T~lVSbN%*}!lI-o;@B-)tOAf+c^(NB)&-j=|yN-BypqNnC6QZ7PZ97({AsR420($3_RFC)c@ zWW4(tNIO-U6goh#7X9XEO0MLQVDu1rK8-!2^jm2-&}o(7I2uyC{-}M3YG*cZ?E2_u zeZ*Vp?`NCfSd=yB(9I#cI1=j{R%m8@ivcOLHc2LrzhzWQn=R=xnL;Vn>O?7y;6!D%nGldPU(hy$)l=?zy!iDCb)vTkS~nuy6M1bXmxn z(*uu?klnd%cO8X-X0iIQy7p zp-B`+F^kplkSrS+bKLcO`fb_N5m$YgY(fFTOkqm$?`#W_?UXZ#E5f*~(@!;9o>q7e zzuJegcY6;*tC^F9213xNB@}!e8*QFXn&(rjx?-lS6@|{3mJ<9;Z$08<<^p;;v*yJm z%_$!P6WPmmGanwYDH!j8n?;)W&Ux9*0c$Z?TV39P-pqi>jL?st2a{0=jwZ(0NhwJP z`M2|xwfZFb8_@}9D!zQWbps!h{Ma^Ny-XuP)_&kuEddRJlJrK%{QL-o#z}iaRC7PG zBe{@?|N5*ThlQGmo5?l;}E=Q#Jee5x~>ya%Cf#TD7gIUwIHITZlsW;QYh3yD zQf>VFI*2eN60FLSm!GGbi))g+DE;{v>Gs!z-ar9avt=3;q}XstDcv8l5vez4f=ONlo?V99h%w5Ty1JDDK7^#x7|rtG@L+ zdKHPxmZMxj{apyrGx~4@y_J$_&7Bq^_6K;-rbkzJ1;r=fEkSWIq$JRmn0|{S#3d@I zGfJW(qBVfGqYYvYv!LLWH*A~~BfJ&Wawa!TS)mJx)BI-)S|w{9M>b> z^^T-cW;>OkNB_`rHRLJX)X2;ZeXxqtIASf*RDSzsbSha4e6l(X7U}Hg2hDn0e_R&b 
z-rfj&m6Z0j_sg+rB8TYGxQ{;CH+(yzX*n~P%Wwz?T|a-d1Oskdd^Vc%)w25Ka*Rzm z{@aEABj0-S)G!rItyBO8RIaA7V8~^AK6J`Px zizT#%M*8pN_<{AlHEkJZ@&{Zi3+$+Mfu#BmWJE+`zV#G^w6LY~CDCth{R-`d`wAJk z-&$gS@wFN5@vYk;jm2^<2cOO7MEQFcnT>wR*m2ryV=_7&o*LVj=%c)SyIL}r@!sMr z=f<=XlGSp$dl$giUEdzT#+vNrL`Y|$11Q0BjIVT66;w$^8v9C49@w%9Z9-B@xto~3q7=G7Tl#-0RTW{Sl@C@p zrNp7>Un$$Xgw%;k4WZAvMdCEKp(kYcRPlZMT6T!9!HGtO%LL4Vvt*7{*e7!wKJPmi zB7jE2`J6g@c~2G^_3O(CU(+U_O`VF=f`okk)EUg=`;_=ziyoN!O9p_F5%U5Za{%=9 z`eI3iNS|z4rPmx|Sw`CIht#tRCC(&gwE7jFX4PMIM)Q^HoSs|MbvExZKzIab0WoBv z**^X-^R-ql(}06oTC~|NGCzFOFA#6Gf?=iLSPu{X zu6{8z#_7KHS&lemB&f~%o|J|rv{(Df)|TJzO{m4a z*)rI>%U@A7aTxSkZEvaMuJyKpqCN)_5o|A-H$u=p?5Za)7vrV>!k1Zlq_Sg5 ziHUb=^Oq0+mPkNHAca09ZhWE|3Gs|Hkx5N)KwiDzMNH(YAu-e_?5q3uiNMb&oW79p zRT8PsrGb3@GUR*~ou*o@FiW{>xFcBKpuXtCzX^(5x+v99-7?+jvaRiuc*mz9>SF8o zGiTc2SSYQ5Wkno8;9gw6eK$^6a`U$VN47&w@8YR0pBHJH6R)Cpqb2GbWM3rldFW=% zgfHHGztCV`%MdWDDh};HXV;F0lM}itcs2Yr0)Sndw_A>*8Ot${4p)SR1tu_4rMrW* zy%E~7>ui2*jPi&X?5$0Xd+Cy+_QCTlF(_<8t(4~9+M`a!`@oSb$Uxrl zEO1&Pnjn;9%!~)lAfn6Z$<;VD-7Z!*#~_Zfs%aGAPe90HMT2w+6^uxT3e5EWMf8r@ zn@>ZK@HSUJ*`Ee0##vF*DjHKB%4GB{Jfh#gmhf58TS;x<@@950tVyQ~0{>4(8lo== zZXw8SYX>{G>d_shQ(vVlE#Clw?JRmTzwY{qnJgP3&6Z^@~)xxDJ!nMupf$&Y9} zoS(&PTlOi*DTyfa9GahDd@L6N_BVJ;6oz}W7I-t!i$&EjLycs^ zxB@Ui{>uy@CE*`y$T*It&PhQL5VL!1G+k7i7ih6q zr=%#*NZfpEvC!a+HZWqe(t_Px!Xn@U$ru8gI1&kPtg@C`{p!ER_q+=MA_M570Rb;Q)b7O11Wn^6yHSh(? 
zWOAF^q?zT)!c!su&32z=$`U8Aqv8)(h}_x=O7pVl4Z&12%N-&JrE)l8(gH;4@VUwx z-)LRYWg|=rfzJ`RP}R$K?3H$=1-rYo4$JBxgs<*3E_gVzS~Tr{!{f2T0y@EsHJy11Oy;*19AAofa+E zAWSb(xMZAvV!{bg2 zZZ~5h?cVvrFF2}mDzKY^>cks9p-~2msi2I@Pmkv{gZcCsO4D1pRO9C(Rp$%XBQolw zSa*aC3JG22*2F@dw8qejzOu9b?1EK0P!I&9yzQY#d3RonD{624>Tw=uWSo9C3EWYD zz~;+S-L`~7F}K4Ow6@1KS*i4^2UyK<6$>IEhc7WRWN)U+41Fk(sfmyi_yEUjRZrYz zJPAnC=7mR+XC`t%{*zlgg{%l2vkCSWV3Q4?yt`Lb`#$mZ?dMtHxHgc!ehoNfyN0XUt)VA&&9J`uF5s~X#RRMZ^vvjC)bEm&D3vQ zeq^DsQU#Q~-xj&wBB;e55~LZveJZS3Dfkcv@7;Fgu5{HAm&DRa>f<84NV z>*uL+%;Ij)t8DrtGXPE+rka`>kF@jim?)t;JfS=j0bPsR06@{mtKFZhr>FI_K>_&x zvj;&I6`-!m_OqL@DqrYa3N#<#Fkie937>qbmJ+8>sJT2%sLHAbDcR-gEox|nF>NDd z#nW$&<~1_6#EvF7H>Xt?zIKt`-EGc)ke_l<=-VAlRw=uT6#*g$=(i~V-~(W_z#rb1 z#2Ll26_w@E@*f$Da7{GL%kM-AFN=0|SKJnG3RVQvhF@9O-l?!ml6LT!1}OMz;wLKM zg>kV_wJ~~^WTJX}irS6|p(Rt*2=bMA7?K~y*G>{DXDtxoTXAn)R%tyfc1hk{m#TlM zx{G7RYz8~*C!65hmlHRaEZPN^DaqO0jh~!t%<;~7$$A$UV&ZjG^AXdoOk|joWWVnO zjw1k`?rp3MbKpe+bxPe>n2${cJa6OYQv;{-Hwz9nl^Hb5D^e^}4yN?`{$0hu+0dNe zJLSzso}>+7D!4L!Q8imF2Zb_tBNY9v2vv$U?lu4Rrv|dUJOgXGb0_hj1PcGEUtvmi zUb}^XCnJHyt8y^eB<=Vu8k=<|Fz~I#BzGjpMek#Ervu~Vh2RMaclXw zV+ov9-Tz?x{Tw)4u7bpft?srl7-eIbEHc>?RKU^SpW55FOc1;Qgdh=(I=e*GW<8|g zNx-JFt>w3Y{`GW!kDqUuM6PtOeu_56=F@*03jt_WU|c{{&A+D%?frq2D99)_GsgSx zU-{d5GPnTriWZ5?7&zqq*PQ-ZFeMQHG5R1#1qQ@*z~lPc>wM18fh6h#5)&f-FvhpH z7KM0GW-0kAK%#*DYp~uI@7@X5aK+^OK6C$J%Ac`8l@MrHW@t8Fz~z?j9cf_ExF` zWRroA+q@uN?+Gm1UqPh4Rm*`eSOl%&%sel)A31!(iR6-I;Jh zCBB_b1E|1yUBDZF1JgEv_V()dgX`g!A~YhhaE-qknGFuG!k@o)gtWJZ9WU0eAMk@& z9Y^rr8H|rQ9j4v}hq4)+y>oln=AwC6mF*iDxnN((@BnMM>Ctf7`O(X;(SvI>+(G?r zv4y>d`z%A2bN;slX_|rhXGDw{bDXNx;+qTNPu~PtI^ELMl_sUTQafmV0BoUv&AX#R zXKQ=sbBi8?i?7I(sDp?Ngv*`f!xP-3)x|ICCFfbAC68UL1FEMNcNi>(O=- zr1iZ>p{6v=PhR8tU44tEa=m5EFOcBi;3UAkU~c6g-Tb)1*x)*(YIigTgNO>TT%rM}Q9up2jgJ+*Tf-kb0T^9v}@b2Y+;^~g_}y>3~x=VzHyc*XtDyLg7|%K z`+cIjkm&%^i=6_>Nt-2&$dU`LhsIMd$4sjsJ=awqDVFo%v5~Y*VtN0!>O#Gx-MPx_ zqUY4?QxT$mXM)LfE2B-9tsCy!Q2yo{e5<`R57kl#yfQ2{G0sycf+VlIyAd|`2WF;` zwCRUIf51}FDdfgt^W!76J6*W5#Eg1y5R631A0W8} 
z3PwK9`7c_#zG1slZlJT;j@*q7ty#V@8|#8j)(?YDhC2ZbQE6+ID>cY&Py1A(#+Vle zrsbO_t|#3$v>Fwm+9h}wp+WgA4-d&$F2kQx8||nrF3l6JJ=3$rC-eIHhC2N1O_i#F z&eC6RNAOLO^-_3JFX?M6CUH!a%LRJxoM`SWh`YLePFuH)yEtvUUt+R(Kl6OfS;fS} zt=OOccI~ZwM`ja1TD!}{;T5`!@%DkrHauNn6Al7=`y4lX4l$$!1+IX|WYccMs))MN zG40~b&{dT(@bKO8x2EbqbE`}Cnlycb)`#nN`1r?)rn)V1U?4~RudHH(%HGBUK?x$E zm8QLjA)fjq1fB=RqhEzYgsB?chihlOx1BYEF%oWvZ=v&XRb}^AswvK!WzUVqi_mvG zZ!N8y{L$WaH4w$+_KhT1JJ?gr zw~$^YA2%tzVWJT0SJU$eI8XWK_GcH6<*_GR$7OdUo`*9qrTlCtt<95W(XAg@ z#Fl{J09UP{Ou9nVKF-Bc!u2R9S!lhddP77>7+q`##i#8QlmqtsshBBPvV-*;p7wI> z{rxhYj~|(I>N)!k7T)!5$t$R@L%G+lx{=;QQozJ(an^5M*ROVaVtOFuPZl0{e5r*! zZGFyL*s1qzZ9ZRXV4RbP)1cGX%iO$u)$kn~lhZdqZ$LacSyyvF@w6HBGhN{_ z@TvJIfjOfBAwthv95A(5G(T$ArI*>c#|OK|7y#G4&2IIbM{V6qe=IY;->a~bA4`Na zx4JK-`TZ*5b_el*QRr)AKMbsQtLsSP40$S-x9^$`!od2^0Av)D?}334ktE=Hud<@%F41| z9amUl)Fu>X%G5|D3ZVSmU;z+z9Qx_*=Pi~fF>FbozrSCm=PQWQZ7J*26Jh|q$OR&` z`rPo0IMVK*Mv4s7mCLI2)zz7F`D~~70Oe{ht(x-)cS7&;F4XB}GQ{OHTEAh`C$bpo znG$UTu+Uy*fS`$3z)}+E5)VLP@+BrLY9i&z#~cnpDwjgj#pP64)G9QSO=inAFxR+K zP|cfZJh8rX@mgViyicN>BO(E;+R2&6mpuncgP(*GZs)ZUKf+aORQ5&h=9a@A=u@?hFJB?uFuH05Du7mFi7PUrwGHmm*0^x<GWonUh>)EU)^gbZqpmCDbSy|B3z+!Oj<$r%g zvQ*VCM0MylaCS}Nl2z?@HCj?lPcw`fHG16NFI-@;tUzau*M0ceHQLyZ5qpkD&9Qyo zuCu&Q%U0n@7#CsFCsF9wPvP@DzX8^AiwiQSV2{!UjK>i%&0!^dg(>w=EzI@7wLaSX z?ckw%TX?k}W`c|v;eOzx5p}8We~^eXK&r!!VM_(!?B}6z7E$DmdA~-kaV8q~Qg^yM z(RTQ{ieVAlh?Pq)g}s69eYy03Q5|erC2I!^<_~ae)*C{Vk!tM8%+^(~ZRw1g24+hQ z@}?i7d{4EqMWQA9VXPLe@fYhhLRnY&w6*!=%4RD1deFhj_{z0CyMR{WwSAEX%kI~) z*msYsUw)QpaTebS>EB3x=Sy}tjuF=Bi+!%mL3LcV4IizohC2oDo#|jyY@msRt@UM~ z#`!))b&bQ*?IBgO0$1M_@?0J%jY?0k3ZPNmJ!t+H>+e4%IBGlW7CgoFoQk zN+VJeOa0894z?8mqgas4P>)8hEOJKZ_N^n?zO+naaZ5Gr#sAFSYV01;;PUQ!pZn9T4J@P#pNI7xT`5m%Fz4)w|N^tn;zbW!Qx4 z@~XO9#L&RHCuFvdOds6_wtCJPr@OnhV-KizVLK|_nIj&qbClpZULcOD>G=~Ojr@WN zaD1u2Cn%``ykmsIz&5&AbsWQZ@&o;&;7&Jkfl?tI-Un{D!tuh=>W3Q2^vU!hQsl50fYp|Lo{yN3wM4UxxAD`?#r5Nn!v7={7+n~E& z$EPrJ)xX%hGdaO)KMq!2pyGuppYKOHQ(bwnUyzK6G4iu%rCpe*PM4@^q2*TH;z@yV zn%I^#U`A9`VKD$FIOkhf` 
zBQ)h}+1YU~33a9QVo9?;M@H^0nKy)bM~R)EQ>42NhH*Qk;g*O&FAe8?0gvvNaE?l2u)*dltGJb0I1T9G_40^8 zGx*h~aek*4?LK1pa1~#fHsSwzHRIsD!dFUj?0 zT@>kVC1C-6ew(M|t(h1d+D&FK-8=UD&2`7;bDPnh1whiGm!rc8xCKl-@D+n7E!(e^ z`I#U-1-59}fkw(B7~wZeY#M%WRR~RT6q<-!j>X?;d9cmXblBb?gJ*NiY44KV`8cYu zBU*01%XK_=Va%Ku+-+$~7)BOrwn(4=987fFpi0DxMYZ*#pheKkZUkFYwc8ZMN8m@ztXR{5=JiO%17x5 zWS9Uh>O2PKY|2X>obG7xCy^QgGEydmh$r!ZFjHo1W7>k$go^`fuG=wH_z-x}_q6vq z4_4%91oj_6`#4;V_F$@52;|hZP*NS5KJmpLnfwc+*IPZGry23!XR{lrwr|@tni-G5j&r5f@9Ea{>$1qO#J++f9N{NV+#i zvQDTaF)~h0B*&u)0?%PUwZp`%Np-V>EMZ=~p#*E4!!ioGt9N8FXOv|?8ZCk6nJi(C z_{Qp~Kw0^#ffYCz z^H*VJAe)F6%xE~WTZBs?z!deV(r8@eOk8iXNBWGJwy#CMAH{674V{sZvB6JMS2iN6 z@6y(7|0j~{S>jGV1%ar#3xA!ghsmD2KqE4BsvBa9|7>Nps8aiIa{b-aYNIVDoAZQ0 z{FkycuA75#xHfVNBMgm`2FFp0ppQR<&Z&w^OlU`1GqF)Ibu7_se$8>7z3|;9EEZ6MB6d(mh2xl z91?&G`9;5(&?#?V2Qq8Mq`~>!{cLWaeRQ`ES|3_=qvn?tb@ee1x33`v#VqDV&T)e3 zB1=h0P5Zl8z_edz!7d#l^2bu}$jRQ&c(* zbzb@NI~cLSH-489d5xu9WoO6(yA!Cb4bvBboVSa0?i z!3o)x7z+<{3PKr&lm&(66H-_XbAs3CZ2*CZ(T;ffDWt;Y+u>oPWGG>MDj>p*w8*Gbut zNt(c_3AV$+d{5mJRPri3oqM@)I9}j~akv0V+i4Gf-!o>l-FbAn8t*+DFZq5aCNp)( zkqdhp!{h!)@`LH5OtW!GQRyhU`VevgM+=o)N`S4gWheTg(fE@5>5`BT&GfKb7uTTc z6L!em>{59KD^7(EMzzQ3a{bdMC3`%LhwXUVbCS~i?jP_r;y=Dr?>K5Y%x{z3P`P+j zgN`O+O2b}K0-d6N+Ko84-Q4FdpfC$B0<4}tvnl`2x-+W4i|p?)!W6gUey+rD-ztH< z6NBB63IO5l^u2k z1}|%?uXtp{O$)+tLiGd`l+VS{C8^h1h;}v$V<{MJJbYyhtA4(^P*h*iUQDusRxXU^ zp*?f6VAC8?NIW-Vl~u;C>+sN2Tz90lXJe#fW$BM=wi5omDiXkMH>J~9nY-v6*b{NV z_aor$6Iw`)8P|@9=o3)0b`C9}sR~RZTHG)9=AZjkwT9MM-UR#jwOly411ljSt81|9H=Rgo7!hy62__?x9p zeE^*<3uK}V7w8`#D^5dnu}+(V_r$gyOiTy_y0pH<9)Ej3v^S1e#*=CKR7Z5Y;(yCg zEQkV`kA>URe(aI=`7Qrj3*g^*8Iw=mIO|Co#V`Z?z>9Jpyg5aA|Io(hZ!S@N_!x%A zzNM90_jrRFvK6B^mtKK;8}12e>hUFLnX~?TJS9^yZH0Pu@HLfecR@4ZuX;Ehl|yJu z^VM1nOS{Gv3R?HAUwNUXQ*OeGeZOsUP2kJOMAYLkR%t%V z0Cx{z>yIw%(H4o_!KQ4-cJI5@uJYI_&Ziyuik&T@^D~cf~-BXQ{j?5M)0M-2VP7}DMEBk8Ik@0XK z>(pn;34Myoo3W|mXus3aBN2aIrD~(j%0AS4ZR5UL$h}4i5B$6d2?3$E`S3~IeLS?H zLX1Z3Df?UMa+5Qydi{O|nta{J$#ONAu`z^%t^Q|b4u=*z05|-emV-vfn-+M3-!z8P 
zCO%iI7d$%V(n@e{r&T$uK{A@;gK+3MTb5qhe4g4|Xv*WHL*GDK&gr&shx|z-I&au` zX{2woKGNO+S3!}wU8G{PzdsC^_=nhH2@UwdZDA`i7f# zl|sLmO^Rq@G*?R>W9Qx~3`X0>3?@!eSPz=tMr~WyR;2+C41bUcF?P_nswWJa6^28b zuqbAt)|JpVKvd_kF0^QRhW3#0AuZr+al_9fowok6{fXDyS33CxND&m*xSxi`N@5n8 z>K5+Y~@b?_Nz@5_pUw(H*mwJc7!~{xgf~6{LCAsPD<}6=87+ z*RFk0;m{dMzrx^#_Swucvmlx;I4IECjS-WOz(U&Ch8jV^`ZGgHx;NB}Y{aJp=t(scI=|zA8k?nl1{ng8B%I1?bAoZ?h3G zV39{BM$tkLHEFxQPrYj0Aq_w(IyuT(%=k)*cPp`B=S$PEqCB5<`Fr=*rvJ9b+E*1C z6=3Y)zz*E)m39F#vvMGyDFu|A)RXz5h{sToT{~JNB-yQwl1n@1b&=f{;kB}1()IR?A2<>lY zX;sL?{9C&58w^YOqX%V+dD5r2PQ`IcN6d>$(u zRyLVQsmgTcMrCI61$g%d#LvBm_)7uWo;VqCuIu_@o(EcT<%k&0#j8Y+^~;Qqmu*R* zF(12WQ#b+PQ?HEHSmES$(x62oanSJvdlHp{{g*D8Tp<>cVk5iO?`)$vIh*>;)6v)0 z>Or2NpcFzuS^AP`Hdd<^SM9oiO)|=9kC7TD-p{+bs@%t0Z=hV0^q+R~3>ROF3JL-K zi{wN5TLzYn&HG1mdVpi#&gkniB%V*dsq2c%qw03tp!g35v>rnY?o2&Vq@Qu=0aT-} z2(JnEN$Q3*{Ldm-uYDi;2CKVHtkwcf+deO|@zlUj<%RQ{gK9D#IwS>@2AzQ7&lv6< z1}yuJRX~T_UpDFsh*EZrso3%=A2$#h)@&(g558~4qJO@o01|{~u~e+oq+t8|R3`8; z7RK?TKc3v4>FXk5ZZOk;Mmck1#0BflMc$uz@T|q%%x-dw%rfN0e z3KK}uOn?t$`(C_H@tbL`xQ^WX@Qkj!@gcj2| z-S;EoNPw5JXM>VE*P#5t)oT2`v~c;P;k!4lcrEIUDnu0Auq%mqzeh}*E|78amXg2m z46uQCv+h4YTV8hMZD3xGS+>9u$-_cH8`5m=90tNr62GJmtT+ zv;{ukMYALl3polRaGn0Dc1C{*C;UPFpKgMoV~2>kU^YJEVIo;oq&1^2@9*CNd}W6J zTN*3|UEJeSI5?yOG=BgVsd^;n0^3w5Q7f28>kr-Pp@x=9FfF~3xmeWi-L4lANyifv zHlCK-pE<1vaDnR?&@V9Tg|fNb5#in(;>}jO)9SX16BcJ*5UNe8q9oB zN(L;Mcpo%<{5)w-5I{ko^8R-^gNZ8HoI$T=5FpHFI}l`_;DjRAhvd{kMUPc{g@0>^ zce~;W<Q@@N5P5kaU^d4(kIAFI7w$uI`!x!T7@;>j)-X$HmkHuitPq;4?ho-iX);oO<@4K2w_?Ci3``|kYWP!K5_`Y zDae0{lWciV<_;?CARnTiZWJ09woh~EPHyt#Q_YO^1YnFMJ1?E z@ipDY-yHvM37RrhzBrkNobT`Fn=bGx4Aj;uqL?HWi3STe1NI#rE7sM)U9>E>PjOk3NaMRl=7>}d z@eYgnPJNh?=vi`1vNuOd+{*xGux1eVB&UAxXW zpx7*xlSnnhju(;Y_wSk6SW?tf*^P4n{mrC(p0{sooDpWn(5_*-lt7RZw-T+klDS_*8`EfSa zx;sxP&EDPt4Tqz-GMaGS=c{a-Q6uI^vU7}SZYC0xe3|qQ_67b3xEnuu357Qzg{DtB zyq}$dXwnD!2M%=kJ_FsTq~aFYO_Em;ta6?-pc(>BVr5}4#&ZaxUQec4^^Usc9#GNute=9g 
z_~n04+Xo9NgPztR#0zCpn5}QQ#FT7!#OvdIBVwhsv&0m?nU*@BG=oS*A{Tp6NiwTc8dMO5VPxGnY_+qXlj9{1N0mOn8ecWtbac-UEoo$@4=qEc zfxH;9BuwQkuWo`tRqjwV#~v$b$wpPYO*I%#OGb9CoEr!U6_8;%YmxAA*LHe(eG9&| zJM^au7%h>a#LiOI<;%mNuHC*3_KN2#*!%ZK!kWQxs;xF;9F8a7OO9x4NN3h3W#M+N zt_(2OzIdqRI807Xym1Uv_qS|~CTgg{@VSc?0HUq1yUPmOVA*t1@Jj1r24mw|oyD~J zFse*YxhV3rOkNFu)B+qEk`Ej>h585p7au>4_R9gb3>DBxnnh!AHx$q(nLgr)nldLM zFEYo)tO=^OnoV(o9+^hOx#7tp=zgsOC#IlIeYv^Vv;4lao%{QCH30$f=@eU027yKl z&3ToT7UQ%nkzhIwb}NDS(w341+=jU*4RRNiQ{usRfV%1LH2FO#$g04DwISOM<3S%E zw9l<{z9AVLC0x3_3#c+|7fb&l$-B&!X#}79F~*fH)W}<>r2Qty=cEWbCz+(nf37z* z1W?|p>5J0(6+WOoJ!M=qMBCGa)gU+gNz76 z453B+w7S~JWVbin@cIH(;r{4=$L%82xLc)gpw;s6rU|yWT49KcgbQzf3szJ8{G@!L z(x55B)p&VJ6`yF$W+Bq%fZ@9U$-W4k#PV1r;0C?jQw0d%cb(Y0e1n6-aAGDk_ zxAe^KAo|z=SXd6l8;*)lGO60gy>>-@M}cL@tr?E^I!lM;a6RjlK@;(!m5lx~Y4?u< zs=|)(=*GK2@{bCX!F2z_mq*JoCIS0aHt7XlL@m3XFv|O)>hn?~CGjUBoWJ@T5k9KJ z6Fm6wx@FJ3SYsSKpFVWKOnq5R(Vglq-%cj!p?IFKv&%nhjdMJ2Sbo4QalT|b zYr#A1_uJ%9gW-;Hn)_m_#Hza`Hk5RY97m^>b;IsaL$bY=>7$Q-QZ(oEV11FNIRKrK zM1GP$KpuZZ!R=HLMUC73r(^cUDvu1;X3z(s1YpLDpxw{%)HCf0rM5>LISYNT1NSym zFp)n^{DRSBNV`KW26bS(r%}T2jV z4#zY2C6q6>?QqYZ=#_Ufeu*D?dhBT$;f8&@z)0Sq`TEo?{MeH+M$ z-<>p{QWX}ix+v?+f9C)wuUN*zGcu_ho5%Gl5aq>yTI%;!_z%a#iWq?Xgr^Rs)|2(V zT~%;=t%+!+P!mI_sez`>ZPABo-rZR|LRVms^r)%$n7yga{F zT3v%#uAAG<7g^Nlrvp8QpJ83HK>(FVhwEwEc7197#pNhZcZ^~B&uJB{Tomr%{B7M2 z3U5A`^JNyJ$<%L@H|W+d-80Dj5#<9@NDVsZn+p!|A5K?HvR9e*&z?3G;o*(~^McNr z)(zZTnLL2$dTDH$;R3gx;)#nFNS(ZotYa0yBrI7Y_W>lpCn{_1Sd7SY&-Y8HK07UY z%}>1PeWxye3W=$Boy%XJ}A2yNI4jphR+*CJoR#>N>N`(39| z7yZtm_wm}erPVHNe#m)0g<74_Z!d2~*0t+abSnv2TBK;Q{AXhzJOIXw%i+ zy%<8n!ldTy$8mYrWt$fm*S-xrEzZQ_WVoJox*DaCbN+`iv4!t`DuY#=dX;*U7|e9E z!IW+V(fmOV0jz9xUUyJ?lWyn-xlT2kdEq*v!UCI~xY+em~SF}O$A)5oyDKzdV^7=;`%!`#}eNQtpDP^1M$_nxA20IN;oQJ2n@TUdC|KO z#-av(I$OK~216n@CS~{6z$B!cjaE8|L;(`wf5}};;rvE~sgy;y{iR0&2kGC>;`rNS z{qxzs?O+hl=_WTxh!Mr4e7uAZGuERZStk z;j@wS3UmMp_17o)KW%{H4D=<=%58iNe zgoD=WHlk{XViQ8pnnmDOkf`5=7%Z_c2tR+)rmwa+ 
z6zN{YL0buZz3LA>5tA6eYLQxHZe_FZW8-clX+RRO(}2?{fpRpR^|?q#M8^4+R8wX7 zY8UZ>H(C(O4Z@s(F~@zS8C1=yG%Gek{!6Nq#J48U*Bhi6Pnm_DNoMW`)LbB9U=E_m zOsv>er_U^olLU5qMj48ZS`SmGfrdeFU+(})%v%gT^&V7-ijdky3lI(X#sP;y{5$WA zh`{d&MUs0-+X`=P`XV@+Ow8Xu#0;uL_V*8gME47vG=xd+1^xM8W7Um~Tm+QVOd`0}Vi$ zB&jHHZ6{Yw%|6U=+uB!R@@y$d_YZq}vt(50&*j1~HR8D;7*Wubdt+^x$bySWV@zEL zU@2_L#~n#RoE{uHUZE}EtxAeOQG{Pi>Zl$PHi-YUU1;x%-x-9OUEb0xiIF(HvV(_73jUOwG3t#MLIkFEEOP{(9M6+Ae11LB16%N zM~4+6@_$qcs6y|)@c1%*eeiW5coG>63qF64AGYd$3kBTald!R2DZXB)0KwD`z&R)R z@B#L;<&qreUs_yy3gk}-JfFlJ~1yT z?oq&D73cbZW0$$(Pkm@1cRd~v`|W0km}PK$Yb}kmU0^Kt;#)>rHn;v245NAaJ)M zA!v9VHJWNb774>s+eonaw{2Nb0Y`GM(dP`r>a`{iRFy~(xE7K}^R2jo zc*W>n=o7*pPYJ_;c{OR$)pru}W3lVLKVUWT0^APW`6`3HLFxp{ zu?*sh#mleEiDUJ$yhDkP>|unQ{xYc>j7DQRz5xNm=T;;hs&_u$l&-J7Vs89R**|K~ zDQ406{D{)Rax(p=MBC52MBle zu7D#t%kUSh8)UNBzHO7v;E2~EKV#-MxFHb!3UCNP(etDk8fu%5mtX4_)^g%k%za1x zujt>3D?k70U>@!~bu&_6;B#;T2AjhZ*2Ltf3*t`zG<4S|-qw2=Fid|XtTSKD!w)jX z^6~QGOCi|~lf=7Ttker|NMUv2Wn%gs^UEszA<7|7%9#K`PdA2y&1#LPs(}Y0OmBOl z&Rk>3d6yC%p9^iV-clG*E&B0lKNwRx!M|ikYMW$e;JR8dZPNe$Qt99Qhf1Gne%|S3 zzHo23xt>KNKP;8O?au0GwH`^DGVB_V>HE){&4m4ytNeYJWYB<4n;=g|Lrg`=!sf9j z(D>#=tJ}tanlL21L%|UUmf`#}_Q%ewm*=9?pr&e>h)ag^nfJZ3f-18}3x|NorI$%f zO8Sn^=}0NLI=f0ECpC9aZ_^EHkht}p3vXAg#wwM9Uv^w?9rN9A7>r`m(}94eM!md zoBAI8_8pmO(#^(~O2bnE`xM&hkGywKY-yh;C`6~NpY|1HdCJNj>sK*0t4DXTv>tB; zf(B=%I(-5nn$NNH_4NyGe*E|mc&@M}DERa2S9Eyc3xEkAReu{+-rd5-E#??V{x>8G z{-$g^F!IKTUxYU-LlA+*s*Si1CzqWNqhg6!k{O2N&LS1G`Y<0f_YL{;smPy!y>FE8 zln`C(dQM4PKFyapG*XczSY;s}AHlI&Q`Vux%f^EPD;xinjh?UEjfEV8eJRD0`wYet z*#qWIBPsMXiq;d2`}UCL>8x`}h50u!0nsyj(?Dd;d&@p@($W4@C04xHWKdl<9JCdd zx;S2>WGaokTxVXK=WFwGGGB&T<*Q88RD(EbOD{5vPh=TH#mT1B1HB zlrCnGKsE@b(vaYmpsZJ`H6imfGML;6Rrf4gsy0LdD!Cj+=ZKrq;QrlU-p()Hx9g!c zQa90#1MWH*l7xZAOMo9iADK)z6i~3&aj}WF-ClwO?OAKrgBMU)i#Xq-h6$(g91D*UUneG|?Np~**L8f@ zFB~G1v@$X(FjS#xUqCd6$9MVEodK{knij3E_KwtTaFxt7Az*2Pa$%!EGRjX&JG7cUq;gfTG;*5X85N)-Zj`7lVzMis{XM2Bi;%^ zM#j+3@cZIH6x%2gbS35sHQh(D-<*@sAMux(Fn2n8Cle z-_w_Vxjo_T+_Z(Ki2p_UK*F>)5RArx- 
zdPXhiHXxM45-4jdp>YLN?16!NLQX=7yXxM?ImY;89Z1Yv<-yiJo)~&Bo3JkX@!p=j z_a)g{WM~>x`!1)9^P{sqsi(wm+Q-!AxA>fXBXMWWUaU3}9nB@yw~s&3Kr6=zhw59* z7f4Cyk~pg)G22bH(!H)ABgkv^==`5B#fZi%RcTFwmSGxoW{pAKM`LYZ20mK3TE?&j zp<1FClcMA@ko3(c7#LwcYEtGCwAn- z1KE%x4e#S}CF3U+G7+pFj~%;oD=D{c%bGAm?CFY1df>T z=1_xGLyW6M+pspVL3K^J{cIV&Nq;mD%41aPjHKN2l-}NB@nmgrYC1K>0QFj=am!>^ z+iI5j#Y;3BHK&(dPqDxuAU38|Mr+r>zIa^I*OtKb+O^$neCB$&i_QsI0l{s=c@>k` z8;uz&XfYtBZSCs!m&yseK%sWyIhxT>xPPjaW!#?MwYpYPDa?_*#2li2gDL8!Mx3tE zr4r7dd0w3AaP+vT_AYmo%<$TMBn?u@I#{`$VBz7j+JES{B*^y(FeAgHg?Elo&FS~Q#Ia?`k z>4B;VQaqWwA2jBl&`bw`b)+1s9Dt=4&YH!ox#r*ob8RkmIT(1vMnNgENZ>vx+dl3+ zcG4cPJESizZ@0eVZFRXmAdZfa(5Am~gQKw9u$Q>J9F+So;t0C0LiL6YM5H<7Dd@#a zH_Bgzv3c}4!enK+-`)1K%PaGNslnvd8cNdmIjSc?Q&p}vg89{}lYPhm0m0acVj6&@ zdUgN7c)2Z^{+nLC8ZzkRf#aDNzk8Di2|t5$}#U%Q+`ang%%t@ zXcC?~+99dQlE3{C4&HS7AW1SaV@*kIMJ+i1LG&UrZJp1f4k0W>v%%&v(-F|XQ~n{S zJMTQ}c~0v3%t~WOOReqf+}N$29*(k){hpIkGPv<-D)u&dm$n>bK<^@yO+%I%$l9{8 zv(IPwGC@=2q5!p8;F_!B3w&*oyByG4mp5+oEot{o^FNOE+uEL|1TRFgzJG za-z2Myj8DA=RUZt9K|ZMMECV>NaX<9bnhhL40SErdziFf-`#N<)|EM(-m@LPSFEPO zPn^oX2jmMw{H3cexh9^bok786bvZ*t6)GB88c8YAdx@%GDX#S;SY(jLq#E&>rP_MH~XA> zYO z8vt@Kea4;JJJ*kGeebvRp;ZR^jh-DW(a~OvmHxfoNXSYIT0r1CG9I3kBnV#VF_F?6Oj!yR{)RcG7(flEGRaZL0TMoP*{+_ z;Y=yuX{gxkjcEfd;&4BN`R&k5wSbSGaJyDGUt-{KI6)g58(VP%H$PV~4Ga|TW$ixV z|E`5a#{%LBpyjp<6JQ+LOr@VnZ9VR{<1_Vo!=MomsNINml?NLP3=DN|%a-Sw>~RtC z*b24`ejptQ@wki)yeWu3B>>>fr@O&OMO4t*us@cKsmLn(eqfg5F2c@g+9EuRD~N?A zGyBi$cl|)6s5l9~uGmp!v^tA5TCAgOy4?!{@-n*9MM`(av#S+=yt2K&p6%OJ3cYY7 zjDgCVMacO;qfutQX=Vsrwg*xzC#JmtJPn?6qv@0rHmrg3wgnLtrV}tOi}wq9gqE z7xZwywrvY0VCo#4Fq1d@bL9Dm0!>hll+wc>DJiY+N8+Z3%Q$jOWX~C1NS;^(m)GNH z!(633dJy}ax_DjK8~qBn3{Y>M8>$0?n}S5)5x!DOV+nW2GSa0Wt@N>PDdc7=d{~Mt zax1qqk8SR$j6c?|gJZ(-?DbQur*Kb^EfCF#1PDDmssP|;-Y0EDfl)|4+<1e7%hapNF6Ze5FI6E!8x zdObN3lqXcCEW#Bag$X0vm=CIZ3y9fjAZl~>-&)Qfhic#5yd_eT{}RW&}OmX7Bb!(k$g>5;%HF>As$OJX@Z?aoKm42%rFpDfjl{UVoqZmB~m%oWC0<;hgKp=yO!#+hGeo6|mw>+oJ?uLl@ z9pIxA#J5`UUDhXlY{zFPQ+;#(}aAU*?_MO{9pNE 
zK@phaq7fu=zM0IcF>>Py(@bs$zCF$A)V>q@LE zCZmIQwUig@9Yt&nq04C5wfK@f!t!sh_=^Xy8g8~6yc;;9( zRD5=)Z}cPWomdYHuxaLEJeaF82B=6G9t&ggulKMKnDVDd2@5q5TFgzyH%HUxii&a0 z;b}UNz^!r4`X+Ec7s|`a)9Whg=EgPH?mx2rv>)e7|9*a&BkTM4lNTP}$S@Nn8PZmX zNK{uQS(Fj(w0E*Y(A#~afkY$IDr}c)`aq&k2c24fq;v4CFouF!EY2#mccIQ|91@rv zp`91+ic;(PNdH7A#HpX2Y&<`)J&@yku2mbFM@|4E|2S`SperMu;Gk?dUk#I+muHt= zR`e1sS12+0gW#DUo(DMbK*2IN_Jq=np{ps#(_6ZysQG5E}QHfa6!WZ1<+bFAk7MeG6mQiM0NvuUgHz$uz7FBV*j+&Jd?fWZ zT1S@T&nSxrX-c-chw)QTbBljZz&uB>rJL2Y75m=Tust7}^#g1q?M^TeA3o3@KOor# z+@X~>LW4()=0bhr&JS^k=r>Lx4@AuGz56Q*0xYcH+V3h>S{(@H-;?)rtP=Y9IKM4f zdlK`K>M)G{Vr-H78Cfl?b(fpQSf6|{S4}XlIS)^Yy`1~=yJuUj>{mdN3W%=f@NQeV zyOWkCDDr@sNB>RvqW!}Z&DZ^@Ey6(o3MMH+2UuN~<89+1(u26DC#ee-GBPLx&s)EQ zj{67@=KUUw?wepskv%};)7orD<0g6Ai|(4nl;lsZuX+-mKIV{)Kl;!rBQs>8_F0zR)hhg$%ou*WBsCc{Z7J%YT+f=)KlOBm(9DM$( zujekr2i3%r^VctwZE=BU7Q)!@Wf}^ocp8-k-ND3IR+7p`q42afH$A(}1L5Iqt-7D8 zKRc?5dYlqKB9H`LeWAQ;KMlk&iI)n9mAWU&Nbx+Sxy{r!LAhuuPNWc5gC%}o_h$&>KFis&V=h8!=JHce1wqWd&l)9jWO^CZo>4gy>b zv}B33FQ`Jq0&}El3d9eK`LkCJ$s+e{W-F*t+>3B*2d@;fLyaH-DyDy?1Ji-(cr02~MdwUj0mMrqMuuY*GBe9)M2|^`n(^6Dd zzt;juP{xWZKNM`PrKMF&IR)`u_!ktQk6)MG*>&agCA;rDri-nt zPqcxyX2J(IU*x$MplgH&@Y~d~;gTc0xnoRGKt2KkIoe7Z>^2i~$=igJ57awt2i4he z&aTHxLEeLppUKu9G*S^_-u@UD(g#~K%13nN^P$v`Y5r}S9%yIC_O}*r+0dl~eLE=v z$DQ0aIvln?_i)-$?id?VS7WK_q9(GaqK=Mz3j8YlgJe@dyjauiK_;&;WbLzbA7 zbYg^wj;=;-l$C-46Obtp0fxU&ac~eq8cB-A%v8gr?nya?_xD38ZI(TZ*_BEJY`=dd zSqCK8si6;kJ?yCq)$)?4lwIdbsxcJWf~KdFEUiTDx$QISQoqFz<-)FNw7Rh@e0~|T zky?&KJ#c#9=wR@)!GhL+s&pDeD_gBx5uIgNt`CIPX@>awER`WQiF}tvte%WxW;Sqs z=;d8pURGIL#e_dAVSc&>7j$AOHI%d$WU?C%~nMJJj~|LMv4_rhZqS?}|$j4&utU>#t8s7-&zX>8Zpamvq&yBP4!&z37F?;F~PE@lY zi`tLBOgj^mn6Ln6vN3GZ4Zx}f*imv~KgBMw%rNi*7%pP`z=d%oSTPY%z0nz;XmJta z;$2zj=~W!U0m|0TUl4K%0W(HC`F=(hmltw>aJa0ziPx|w`6%<;PdIEBB8l2xg(hmR zB#aj8cU-?RRHvp<>%McbTDI{L-0V>wGO5AwrIEEZsNt?M?1p^;8s~HM&S~=2Lg%&} z&1&yrkaj77JC#9PX+GJT)a&R;Iset}>*s-1Dgi23%<^6&slkxhm zpp%>3Oi#k#ReLJ`0Dt>#2%cET`BjF`uwkevVUu#7kg1#x{pT9FLWj_CX+FB2@}vKurY(q* 
zn$9ixCbFU5>@saZuiK0PP`y~XxOn@gtdmd==)1i-zIdJhmK#$7kc=Ilz4ugGA}TZ} z)FqPP<_@?0vyAV)EaFXfpD>?7g6wKK7~44&H{~# z?K!%`JaEWbFa6dz@oc{lh8L9|tYkK0S&R>+X!q7pck`$5j&fv(Xe+L2d{SU5pt-Ms z49I>h(+DT83z!TRw4_d*Lu76Yf!TG9?hJ`=JO_Vu=So-Sd%auYyz$vf2q;!wjlJZy zA1-fLUk@SdoqH62d^$iO;2BvGeNW^QkzX{>%e|W(V!uiFYqUG~!|3`zT;8}CnxgOP zvTNf`st|wwnh;u%FSOUb*n0GVo20)5a8Y5c7aIV=fBByxMK)YcblIZm;HfDL{NG$w zq*6AC@J+Z7AC)0vi;7dhxl;O0vspkOp&N1&K)m2 zM6OHjR11Rw#1HunlLg3byBWUO!&18W^%nkS{U{mCYw-{}M9BiLPw0R_rc2gKMayZ| zk!FqE4fk%g`J{>4c4Cr@Zi9^!*Xxr{Xms>1K(+h)np1yRK&e*|x^C=t)jNXqHy1jf zddIr?eBOVmC$G48D74MasM3X$_z)k(^ej8(&W>>J!j)v5557RN+(auM+8oRssZm@4PXW)2s6f3-d#76}xVUh%YH$2U{0 zcltq@rOjcrxSU>~wN4Q^yoUlFJ>+D3F$^{R2Z4j_XJDRWQEfWvi zqQ$~wByvAuNl4bpijF4agMEIuc8!_1Eo=Y4dc&Up?K!y7F>(&D6AdjnK_(B9Zi(&HrA z){TOFPe}T-(c9xdz}VqMEFSq`yJUk3!432WFZ;qL(E}RC$7*iJE{<}{`qDvuCtYJSY7rb<{CQksS62sFgAd05r7AG#A0D3Zxns(XvKR$ec? zrUf}ROvwWJzqQG`0yiSFOb0J5<}2AesYBZDkv~Lv|Sa_mvR)FKNF4w1+< z9aBE*f;&yAa0wCLV<^XGj8;&8xqFBi+#5HJnm+^j{QB!Z%^qD>8Ti4aRjCkwN(g;T znh*0bL*M}fCeJm`RMwlaa>3}t&*726L)pBelB|Oi(;IQ5aT6D_)vhNp>}BdcUcPCU z)Uk2Z8L)Q5hp?+s2;&J+vZZ2W(~%6$epTzhNgURKmRslU%dnb*z#m)SW&TzugnFI6NEN-2M*f zRVTEiSN$Uk3Fx@@y+azjLPcn!#R(djqOCMmRA8x8?l4`$D*w`*F8|A29vXj}(Q!6^ zc{=aD6qsak;(hZiD+-Z=?P0Km)ie<~sc@i^>v=m~$C`hHU5MzeZj%Fr%jeV&>|?4q zQ#EcDjJ?w{7_;c};^X6z0&Z8hypK)we2;0yv|slGs_Bu_I^#|!*1K`g##waQOYc{B zW!jcV3EUs_r@)WuyOD66y44wxfKX*BLmm5`B2>~yqqC)78tTh&=BeXUAKR}&D z776j__ZqK_{Ay98+6jW*WD2FT{bZFu4BNrtQB=?f4DG9eN1RC4bipWQrZQ>fXevgA zN9q0D)|6aH4i9&6jOwbWn7qL3$%IOcIHD7RiIeEmt&$~={3j2>eL=dDfZf?SQP$`M zmk6L}#GYXKDlw!^))@E8CBOV;Q?F6V3aDuNov)1}@WPUinyV7<5v6g&##@ytG`nn9 z%~zh`V76Eo*pU{!*g5+vaM<7{3)-T%9er3HSykz*V&oJbYoS5G=HrD~|#HM-f zjndQ|o*sF$Gg-m>VSJX-*${d|+Q`quU?njk9kaOl5)!^@bAIV|b7^bE8FU}@$JjMu zVBD01le2%iOSM=2ExZVKUanqfac?8e^l$uBly%lsKV7jSpx&%;lXYe&r@*uKrk4tQ z1k5zyryi~qU&w(ZsRoHbau}O|8d3(8)H51U9}5~58;)*5g^}B#rs*IXn#q3mSG-uW zQE%Ru?c!3?C3)`(LbFEH0Q)2s|7U<-m|f)G4RU&WEyBtG_c^LOb@zWs;dQ 
z{ml|3IsWuXe38L~k5bSO`5+3*rxmT!Jr1=*`c^+`34S{?X1F6eLj&S~1sw#iptwCmrxBVTLUjaFs0WDnXwOnLh3`!Q2NC3h$j8>eQGdZn4sLPW(KSHfLt{-O~sY zf4r$RT9rhexZH>sYmuL8ktZ5ltea&BwV0Z(tW*Gr#?+7Z5qUr5NF8pRX!J&5#^X{b zIMDiVT^d7&t}d6TmYxZKJCshSPe0j`7npcn=PVMZEM-W%U@I zt#vh;_P|qL;+(cc0D$Xvt08YR>X2_w!PiwjvIs#XT+9dGhAdErnw-?$cHm9~iq7lv zz>V!SAeVEF|Mt_q2Fpe_FU9XM^LLwKpW*34wTZ13f%m0_N#bS~+U8c1PYzdu>^tGI z_)jXNTNVmvExH$9q(okAZO&F%Q}3hAojUQ7PNAr0h4CXcg_svBFZdRjB>K$+fIBh#wkU)`myr zBqx=AHYYND-F_*PB2l5zR6=EBKSzU8v6I0>-`r&APxIMv8ykB8uGGID*ID26VHE31 z=+Mgde#=nlJ(QwCp?_;X7gUMWMQa(5&K=yIXD}mx1$~yi!2)-LBIbPMKJaAULU{v7 z{&5JFldP!=sn9g)v~ozp2G6QPvT|~bGk$!>&(lj@Hc&TF>OHkAe+iod?nK;r-qA(2PEnEP!Nc@ z{qLf5fs0oXBUp2{bJt9!UIW*>U!?QYzeucNh13>8)ytgdt@db^Uz^9) z>lsR8J+qHIbFq{xpFA8XRWv;m^_vKZpY!9(UUUHREmltF*n5hhKg& zSAl@-nFaN%{gFEuBDz!>frO28&VO9;o!JTa@SujOcu2aS z@y*<^-{f|`5RT@m2t-1b!vh0FZ<~G}&b}|qa;LnZpvrUHWdypnNB|CKH@myDBM38` z(44--Ux3%apz)4iO#S}gaty$ocS}o_jAvKbphaxjVWeNQ0t}d2Ie6lgChxwGZhFBww%)s2uof}K&whQm^Sf&RD1BzwfWaF4$xFW55oHt4 z?QqPy{uWkL4E#t)BFjkg9!_BO^GI~+A8uAI28*1szC8QttA^tMVgiOnWWeu?iGhH{ z*0~{iX=fmlg?jb*FF}>c<-72qWn6p|_xT8eMEKP5P2q92M$}!{ploZIcUUL%JqLU) z`CruNsnvc+>1_5TUu}H}3<~a6{DOU4`hRiAH~Xsp1|s+Vko5HRZPd-`_5BJ$T0XYL zxNLh9Q5?*@H(zPx-Wj=vvF&*N(FEbjc~SN6kmOy_^=b~y7%xDsbtAN#DZ5z}I~#G1 z?rhM1#p~`Bkb+TcyWa(-YcX%j*hmIhVUsd%vfGp?il*GILEiTH&cJa7{tO)<^R z_m0<_G;R24C`X@{_u>)3ld33#-DL74DlxBhW+ZH8N~bjsxz2|(TwvcN^y$=@cU!VI&F!m9+u0}0}px6t{ZSZKmZ9{C~tKc~nt?*qPx=Zfh|# zzpVbhA(XBvpFDVd10J__948B+l6kI?9Rb_Ne*-D;UH1MMR$?!(c-=7!3=aMSq>M6D zQhKlBJxaVp^{k4Ai;f;JtDhAPD6biI`QP*RZvlX14D`;^u7I1@MgrybXHKcH++R~S z<-L@KkGH4TB;|mlb7_H7Rq`;Qt!C?<{1WRE2iS085-1bj6UsQ+9*VoDVddh5h4sbd zYH5$iuAcSVb{MsoiTggDg7?UN)tS;zQ#+kDFKJHWGm4MctCqS`lXdH! 
zT6WiT%nZ{?7P{$YyG24DaQUU`5V1QCZ_$ixbX6ITJVzrD%t~P+_yAFNiVdRU;lFT8 zct0_e@xXBA5)a9yRa)9gG67b&8~m2}U^DTIrs@oB_|m1GI)u8-q2CF~i0Gu)s$FU>%m0gT(+`?&p0+ZFW{d6W804}KtDl8jpGMASYVeVo63!#MbeRk%* z=#h}E!_p!pBg=o0CJQ`hPu*tJxocU9RA=nZx!Iq~K^Ko94DdW92ve57mQ%H#UA`p- zif_&c@B-9({OJOx!ykc)d3|f={Zs}6vLX}%Qu!!f!*9%KAdD(N5SowaR6BuPMkl+P z8ymQrL!}j6BJcV9NknXZ;O&ZMPNSXhbxsMKt#vn^{fdY}s8WYIi4{bEJh&|8{|v?0 zq9fzP)Z4K7w76_kcc)Mv@0FR!pC7fA6NOlI;OpK>p$t6?lof7mh{rCsm2Exh(=|n_ zQ^D5~qjr3UPA$f}zs2)BM=_IdFPHaU-Lz%)5~Mlk9DO%~k^6==YbWJh8>+&l`v>_v zg=M`ofgKwc%P&=nsAZsSgw9r`Hz6jbAJ8-2fk`>c$3{5;Ql=^5x0-9+kJOnbJT@y4 zqHqa+!QUT*ffl@0X{XVn&ngi(zSIpvJu(;~q6d+SQ^!?KJwzk8D+y;KQ%yoE*;kZJ zPIcPbISl^x;Mn{bJn2bN?1_dF>Z`TbQ$JVdnFJpZqBAXC+N8*queDn`;&z&)K|<+R zfyQLwii@KRRcUs=8MAtRzuU`i&eyykaq-e+Rpd1* zt>)-9jXuo5$Su>lM3|USrkOri-jC^D*&1kT+n**5NmYn2O#lG_QvA(r6Yu|lF;B*j z(WoOj8(v=)M3au^FEJ8)dZ%r&o;Y@&E?6lCl^+UZQ*moluXA*oyo&Fm&0Dm4vi2wZ z)Ek>s4o%hF?Hv13+D4O?n_c_u+%+yYT~`y`qXW=Xq?TcLoalYB^C7RIE;uY%o@vo^4jAG~>t4XS74| zWYeG<^!Lj--Qb z>|*P#pIOWy%;)t|$I)$YL+k_RO_9`75cc~{xqE`Avt!Q8<&v+D84as_&$-X?R%z6f zH87O#iwpbex1^>ToDL4b8^`;su}Mi}SRn3XhK>Z*?RD9&!(Z;B_}RC)Tc5{VL1suj zgzoSzt3`1qtIl8h4L<>a{6T0<6dcp=R|iy;y*=9cBmbp?;w0PlFb&u{E*JA6*r@b_F@0h+$PpC3GtnF?}l)NrbyrsOg zBZXFZO(i^*#JdZJ@YvMUfEjDt6hU!w^AyrntneR<@bg9!k1rOV3d&I~ac5_LH|=Ip zQ5Me{9nC3{CW{&LMc&%KJ{!w0lTIIHkaYQFCUbR_mZoN%B-A3mfhDu{V+7MqE3T_t zMN)_PQs_Bd>YuTuRaWR|@F@q7Z-Jh#OdTv4F3W}N)2zAVT!MTKh7$dN1o2m0bd?F^rowyNqW6Y?v7^xW>#K_@y~UmyZTQ=gx+^T zR?GF%>ez&zgrJR#(ny~T>^n2ioek{flqpqy$60lK)$d8|1T6o zCR_ynPI%vAMXxX(9lF6weBz(c}PB=)zUj-eZZPpMCu z02U1JN&CqEOwU{w$Jx!6=^Zaz-@88^cT@05e>xxZnkqzH&>eXe592uyq38)H~?ZD|+NWZtVVG#N5nY zWygISQL#m~@aT%Og1MRw9QC@XlMwVw(+v&;l_)_?cR@Nje6?e%m2!4n8P(5*LVmnU z#px;DnwuqH+MqwxduLn-vEQg5d@Euq4%c}0NxR;V&jZr86t>|{805H@I9s%sLhOeF z9&+M13yfZwSxTQ=2*9qoi4z$tetEZwvdZp;$0}sm%CN(m}(# zx>BCm0SvqS8}xhg-_UOsJ41=)5w(xOHJ< zd3Gn??NS6qe2ju}?{`(UkG8_X+LIDs|@0i3S$Bt0#m1T+6^ae*NL+^ z+-h&>#6OPgX81c*veeDWwFq4yE{1*x?aE}g@Zc%heNh$a>F*2{ncA`X8We^#yvzsk 
zni?A4eoD_KO&D0b{HfX)c)h9JHwk68REyv~zC2m?xZQ_}YHu;<;&qhC8x0N$G8+1x zXI(4&EX7_vtg5aL$Qg+@nhz*ke$3dCg;|6nYy?C{cXq=u<9k*txk<;fG zS+>GnAxCf7sqMqP(u$IPe{y&!0F-@VX=9JUrQg#Ch*!DBS-s3FrPomgroZ^O3fS9af$BHeHRo{tNjZ}^mPWst7#s=T&qN- zAz6i3NYV?_7e2>>fC9!ye|E=R2Bu$yg`IyT*QEW8W$=%~juifGnoT{?S}YfHIc2{` z+85Kv0P+tfVJKp?2(P#pu`}tIjgaNwZ_+NOUeQ@RzY`_H-O)hLf4s;W2)T%Gr;4h| zB_4}G;#l*A(cPPZ*)ah9m=x;IM_$w}w9ho`V}3A?<*{qcdO^ZP|4K;z0iz0LkgT~1 zsYn$wA1Jw4i0XK+-<3-rju81_fBxr326z%--}EON*vS8+)BPtD8!N~Hz<*Da|9I)Y z0oy5R;1}%SX;g1iyni0q#6ze53)GDjj6y5I>f^-$P?IRMM*D#bctpetp4}&I z0i*uta=m`1!ZFR~SPugt4r`5wPbY*StG;Ogb)`w&CP?2|{NoZ5R7>Q)qWneZf!+Re z#D{_P<2U-!<#*Qc&8lNaE+jgc%*M4= ziNu>O5<)`lW~S=I4CNXhZh0X7W+5O$RK(~I9gZfYpp*E~qGiXJVfs~Z19$lv)M)Vs zmCWgQxdz?);YMW9C1W|j^>789R$*tygpA!e>wc-p8|f&>1;C=$e=LfVu#scXJSMQL z#g|6e6=kRjwprh4(1Toxq(

TBs}2JK@NF0D^U@Hyu0{2g+m2$DwJcY zgLhXJl3fybDe=64b-S$d<5zElxVUjZ!Iz<+{RRVHOiZKdiHVo@Mbi~;_6;_0IbDdp z+bNF>R*K1TIB3#nuz}eLb5W5VdTDEXXg17se*%wZR_HgNUG}_J9rhXTTjB9yqnE4E z-q9XY_9WwJ1_3UFRtrG=0uP{`Z~|j5eMhA`HDhF zz?3UgmNANeVRr5Q$dMYcQ>}U^>U13F<>G)buC67-;F+it7zBG#m?tWz3?W%M$kP`XPf?3^;4_66?}P|g8`Un9SR zciH2r(QjZQhyoTrAWy9a+)Y*DY`^g8kMBLN2Yx&=gXVap^OCv7mOnN)w_@g20kg2~ zde$m6sTqm;>^3S+wReVEZ42%vMcLeE;dp@2lI?nBNM)Jz6@Cc1_3@`wJCcj;+K5o@u`u1*H*LNhOKVdEOu zoq)&c`gUx@~>L=eo;{hIJs)v{V!1gR1pbIuY%=SDcqB>-Y35!^^GvMej>X2?T&v=K<%xHh^2s z^cpfhBVdtSqybS30Q)0xSqU>4Qw4l(VW!}e9X<6tpRN1o)dSGVc{Z{%FZ^AOmx7=C zd3is+aJRBwyC;~|nkVcudTwGrejcTY;7)YNXW)&w$K1#U=| zD%m)nN~hYHq3a5r(j5fdxK?B9ZCoPLQU$4+csg@AT3W!khO#`YG{b^O#9t&*s`N1V zaMGeKb)c!DHNRq0^|{r2PgmUR=IT@V$cNMdKsFaz(}?y+la!RSEqd8~j7v)DGC}=H zFQX8#93|$Re5SEo8n9I7{;TP%;sX1&nAL7tez}m_wL)7MSxd{v0IWFk9i?Y52=kKF z)Q&5CJszt|Q&W1KR0Gk$a4>F-%Auz3jm+PGpdt=GjbnkKHI};|ab0?)suKE%P)H66 zt~ktA!!1zsPzVvVw+=hY`FU%QF^3wDY%Rr$aMU)KEE+9p1aSg zC9|VJMP&>}2q|icPGu~^iK70bq2Xrx9{cU!B*fARjt3eOn;?c3wZ%Gf!VbU`BgZ&I zt}@J5Q%r2JK}u3EsX~`R*i=Dmbou&xbR);dsIE9r^7|NI89!Sfy;1icS=W>@q|bCH z4@sfApVG+m8oeATIETHZ4>ni!%eV1}p$Zyzr8e>=)D)L_I~tjYo{lM89R}aj#=a<) z{lP)_gssbH(w6jJ{!$54Mq`UXWc1C;UYkV&>Ph$dr_Hk#U{ripjMVs^feRsKkCVhUeqMcy>qQTv4{~`+k)en41 zHdfeyo-KjNB5EL>ZfmUkQ1XRG{gBucrehudqTZIQ($^}l6@ZLvwNMLEE^Iecl3H>& zw0678kzl!LALkFj<8z1v@d&uR@=dU0b(fcPcZyJq~Cq&*@*yTXKj$h6(P2dc0f%bKHg((YErKE z@pRO#;jbUHh_0*bkXb$tAbg4Mb=U;s=UuxO*jEs?r^#;H-3LQ6`!P(t>SFrI?aCk($3LScIS`8C=8fDRRKU8eX_tgqRqTD`hx zjYB`M_2_7$HH0=<&tvebq?A-~Iw~y`3M%q}An2nySkpT27maeJ$fU{WNU;$vyKo26 zgK-ZZSzLT-IKg5A2@el0Q~MpFR=q_~5R$KikPm;RyG1w%zb@g1yTeytpA{0&2^MXi zlJ_!-2j!1uU$`6;7aC3|%SXiGyorB`)HNFSMLMgJX+U3q$i$_F!sx%?4|R&1)N4p^plXrWs&y&Zi$bfwHm|Vg7&=|8|S3jyquZYsh z6?mAsGaaM7jihzz_Ft-GF06wzx>A2d+jNdA4P0g>}Dn zBR?UN(9K(7Wer_o>0LI|`c!(Nkf4AmEOxm+<8a{O146t!zOxZQ5r2vIBd!{Me!N%Q zThZ7ukuNW*FAp@YfK1_ZMfr`kt}NXMZ^kF)mj3A$yT;aa&JexaWDt!w&XItYI!Sp) z>Q4^PEroT@6;hql!Q81`@;s#;Nj`6XyzU7M)61jRZ=YfawCK$hz|Qpsq@Xx}=0!`8 
z0$*}cQUrw&;!?Tx-`+oso6(=fYt3871Dkoin(i7rH#=A8)|7+3)fEj|e>W3Wjdf1^ z{``3Ua0tdZE+(9Ov79Aj7n!NzaiKjqJ?I__79plOL?z7~@braLv!FV2TR}W{ zm(jtY8ieD{_ES1;*z`uS6pKOYGsbFRHe?B6Xv^(MmkZ1MHNA_cUy_|=A6M?z=3~By?ouFY?K(Kwt>0wOA&N;Neta8vp@f_U5{<8VK3JmLN~)#i(25{hv7IVOwp{JzppEny5;GjE zZ`Z{^jm6qr4x*0D&w@f#Yb8SAwR;j2GLCh{4_+_8xWb<24Y%294wOp5K5ZI@t+1Du z~A{z!xmj{o_QQ-cmj3c|FZ)&zsl72L}{pr^+Rf+J*O532>3hzp-H0G+@Gq`Lb z&-4A{s_JUHE*vPj4Mti$!{_F3mZ${~5uF_4-R57qEvs2x#I$}Rp{R|YQBqJC0+xFP zO((7xIbN4L44BTQ-caPk9g_Gm%TaR$_rFV)TZ zfIE%jQ+F~ZK)%VA&#L(&HDZ#)qE&^Wv%4M&keB>S9QB?%>tgzh49q#I(K(XTk(Dn z%sVY1F7e4jr;O|`8qYi5nd*@KqKI^JEZqkv$(~;vI=S9doo}n_8_Y2@uL$lgzHvs zKgvPgG?fUTv0B$TTDp+l)5+Pn_CAG#^>-jI*3Wmz z4;I=pENRJG7NX1DA#Kkp!xm$kbun)Q)_fwFVsGp|x;I|&{~UfF;mIi}B_BpM=E#7w z6LzbhJH9-1(9GT4f@68Y21hxzn=~?v2g47J}TDh1x}=9I!D%`$XS& z&L6OJJS({c{TRYANu+N(#lI)C_;e7jh`IALp>tOV9~YMcNwov}@oBL{8PHTY*xXV4 zm9RXoHi3DIfBsjjpf%MS5mk7Fl*`=@M42J9{%a^P=_jdGyjo8joOBK}6Ri>SSh!hL z=06d4fFq3#A%K+YFU*#P<@v*Xnv%H{AQ1k~>Hg~*KXj4oYT5yGN0;dT1(TBhSpz1V zt?hgrb75aESGO>9CU3RlLobnv0^^~IJMa1GeS*8;&YiOC43)N~MG zvXa?3Xz-P=qy59H53&>s6ikELKU$gNbgnYy%Ty-FTI2g zNeG}cDFG>=mpR`1ecu&k&8#)QW`2xoB^=*#&Rh4p-@Tvx>;!6QC{bTvxf3AcRu^olaio%UzX zo<42wxyOCM#a-w9`(T}?eyz>Dps&pxPuoE;ai*qI*J-E7^uCg$Yk#S-zZeYKE8}Y~ zz{OsupowmmbZ&*|JB<0{GU>`(m{L*Z)u0&gdwE}`=2OmVe}BQV?FM}O+$5z^FCD_! 
zKc&B1mH;6nUpJijOXF2Z=*!{PpLW z(>^CEBwhgz1WlPoJW#e%JpUfi%b&y3sfNCz{et|hPTni1d09%xt8oAD%VcDCx^L#m zQTl(2&~{*Z$m4gOETBc4f?_W4TxkaP#fv!H_3O7`H~-R`6BPJWB_QA+r59!tb*T=Yo1P+pSj*nMcj*pMcE{YtBk$v-5A*sLq^h+68 z!3;391`s1@6PhwyngzWDtL ziL{3#aOntvKLdF1~dJ!IJbenJws{$0$^4*L5MxV;R!k-8S>k&_DqB*rJeC%`Uy z0R#d`yI5IE>L@(^&+EYdWY}%s@E4N&{O<1VeD1=0PA*V>K?w;7egPqVAt7Gi30_wu z4F1f67v_5FpGy8)j{?Ni(#7@#+|~&O`d#m{=T2^L8Fu#H4gK%mKmCMw*#6rR%=JIR z0tU$c`wqV#p8)^=>IPnw{#`1mW$OWPFjla21f~aQLsm>eLi+FL|4(=RZSjvcjsAU8 zLRjF>H~+YE`b~XTh|41TDd`8h$MLPj*`<#+lUAw<& z538ln^6J8evkF(eRzH~WoO`cxHGINr_2zBle0)h4M@^k+MBR(?O>4KRs-&c(_VEqg zD>dBA+Ktbn^={sLNkV!K^zT=g5@|RRe&bTW$>NtUuYqW1AIp=RA*cQKt6ldDX}|RB z+|xg60O}%78z(zeguE*cO2{6+aq&b2@-OA1yiV3B57In)PGKV|G2m3!Xy3cg{L#_h zU7pD$@wZ8FaTWNpQ$Y7fwO39yL*g$D^uI?JcIoZOamj-^W&R!f-&bMTK>v}QJzxG9 z=dXlRkW`6QeH%}M&kO3DSS!de`ot-GcV;+F3`T8i>Q0tlMoRW7S^RRhEJ6b zyuy(&ms@!NIlqUZbMF3>N`998-_Foxk@h3?iw82h+9h~Ro|L;LsJ^*yZDz71Xcmq1 zYe&l+#u4s{z_5CITTNpt;&Y8&wlz`w;!20-nuQ#r1~Aj6w4l~UAl2UjIjO{ z`R?JH3mufHJ~ttqQPIrrsKevsj%e2xQ{G#?J;=EK=OAUt@9O2;NfdM3EERQh10fIS z!=72Dkf&AJjmE^9`6_-__3qho*?( zzFDqR75DDT&2?Y$1es(x7$a9&Vwas+8Kqiz{Z=O$Tz>Z5#MD^ULL68`i(nt&Y+Eg(6^8(_7Uu z+*2FN{3d0QKl+ti;H1b>T{!$^u31%)jGn{VkCmIpW(T1aPWXpc=;$~&I9klu^4{d+ z+(9mPWaB}b`TVl6O$@wI!okx>M{H+qrCnDLe-pczR0`YX-j2|@;Tp-=yrmW&PgONF zwLX|VCYGfdzA#Ys^vHX6CY=T{+<9%`L;tkf&mrSp zAv3c|$kyr`s#&68b|A_lr9UGuxPI0^{P-8M5_W2Lj`?k3Q7bQ`Z+LXf@s`rX$iKe5 zBe)n4-M0p*kmzIl&4A(>jb1whY8hKOSj%H7Fe#HV|7v8l9z|HrYhz`-SFGtb%-6Kl zDYD?E&4!bydD!v+)wQxb3GFCheyox%+#BY1bOh#<6`hzHUMmQdPC*@vm(xMMSomRx zMCK&ULX*^c(8QHNFT`q$ans>lVZWV;gni+@wp$#NN{l ztG7#twU`;`Dh_j1pCa%!>~Db)7I=eP7l`d3s-ePWSmn8;B?L2&xxTKdYqe1dqeRyW z7~2iy*aq1?Gx?dCDx}XTz5BW+b}cS}JAY%dU}@1(cPqY2+>H)VD8 z%C>2?KeXdl#i`lYY;2xQel{o9b>hW`$!=ngXphb#L3xEK39_EYKH}%x77u>~(8M+8 z75Xr` zIZkpluwVM?lcn?m1Tks1J?0-sY3g=Kx0=40IvthhyS(aX=`QYvizE5`z^+HFc17Rg z!nPRcR@fG*i4^+}O=2|3vDH~o5nk{NFWrzw zmZt2{lMe|7j~qqSrPD+&=NdLO7Fwuuu1#f!IXf>NY$ZC4dE>pdA_Qqd%e$C%7zB|b 
z0v4VfnJ&(gK?h7WhTnsG)e};;^iWwPAJYeJ0r&KHVv49vJM=0C{iOs;M>h8lVt2ye8p5u_ab{fIqPgisLnE92$M z&-}*vUha0PQg(ir?NSKPBo1Vlg%XbqyWjQU1j!^c(T4~Emcy(TQ4U#>gC?*seW2&P zJE#Uyd-(5J2pT6BqDVJsg3Z;HH}TcgNW5zwUrris#9nZ-D>M;Hq}xHRl*0&2gX=pV z4lM$-)R~5Cv~mg!1NfWtU=0h82k%z(pdu#3tqbN?d}AxmYJf2)2Mmv)1+vDpFRRkq z?p3A7Ap8Bf@6-5~Txys8^H z^}C(I2CwsX;l<{!vSW}r8Ify=Q4i`izeVu81{YY>pu%^ zM*c8MXz59rg6*bq&)D1v(2~J~QKT`wwh>5I24l-quHDnWa%eHwrE6YMzUXUTE8@Gg z)`q|vXDRA}QKZU%f~iiJ>rUXvlFQJ`H!$F*;RqL!r-r9P&xFdIH*ve3XVck9V)p6|#s!9aI2z^J+&x%VV*t|kzvhV3iq1f4PSzzKNsfDO* zwTTSz&IuYWtxR_I1`uIOm;v_yysALAIHf0x*K)53Ixtmjen_EHrpN70Jdm5O z>B4@9iEMj4lW~+XtuNY~XPWr|XEs#WS9&%%?&s!X=vZkeW7X_jL&!Us2JKr?Ud!s~ zuyRG)l0yGX1P765#H0zGXIki0Jvtm^nas7+6m5nNpbEjsikK$9CbJR2Tn%g;wAk$8 z?WgvRwSuA7k>x#dBNeTls9O6z_j&yG2b3DU82+7WB&iG<=^830W;6U@dOA!TX5>Uu zyq%oFFs;IbO|#o_^Qy+e69cE@xh<>r$ko3)zUc|Y z#rnTsE~F_xoZFy#y!NdRtktl?RTjmpLYvrpb=6I)-R)FAhcH>IopU%D2}|Gk21zGO zJ*Kfxr!t`&E6Zh2H)n^nd9|cn61fy?dq~+nJZkRLc4d(#pXupQl~thBMgi(My+K4pWsaN~ZCd}5jN}l7VTb0n zAtf(-u?2OK!iBPv`hE_5ftg7&4(=0_)B1J#a*io(8D>qDUMtn_Sk(**%$2g;_G+M< zA1Lzd4T{S`sZ?V7>-#~vy2^5+`c+c%;j#0%Z5)3pEj$0<2RW|NEW%8APerg2PjbxN zvIf_)^}-UJ8r0!i;KNDb1#)4UkI3et0ebXWsGMD$9-^@%KgqWBwVl>y^vaO)hu*PA z=tCb~&E$huNRjRu)V(Efxx?pd90S$PbW+AW%!3xrWG3?-Ov{V6^hE9tmd5Q}XMooK zKskbEUoJe(BUbM>?klo&IDhL~5@wN%Bck=HjX#GLGrIIVjbHD~N7R3o_5DkD!J}=z zd&hvfTJ-Q>Ff{E<6GdXAh3~K|7>*e@mUb^)5_#$1_kdEk%id5J{KAlKs->pz%ii^C z>Wvr8MJ%Pj6?VL{U8(GBzDJozkxourgV8o}y3&wvA8dBXdT;U=58*6UvHvpCQPP;a zn-4xvT5aN*W@Co3y-62l4XuYn{q%U&zyKGU3RZ4wTkP&FMBCr8@ZAq^l-M3wWNk+- zcGuqi!?;?HeAx<4;r-0Gv?7tX0(NR*tbhi-tyk33oQclWojLMaDYHDAtQ-Vq2POYC z`@x?~0g85ukhRxlTS|+>{+t%6(ANW}!`YdVhei7;Hk*#Ea?WFq;)InxB#=A+Kd6G? zpF^XQf6YX~1U>7OT0=_P21!JEE0~H*OUHYymZU9p*^WP#&n|lTjKAwWScJ>CQAwg` z3-+KqqsZcEkQ-tW-Q!>&*%6v`Xidt&@=jvByD@($c{4qp_9q}gV+PjL5e(OrlIz=) zHAeAXEA@D3)C|X(`^+&#(4ON`I_%ByAq`#hB0g`Mmy^CG{BpoU1O+iqm`mTF@)F9g_V~_p>q` zXZ!Y~uO+pcsckDCTBDK9Iif-5U8Xd9Z3S}e#pieS|QU!#d@^W9O|3wls? 
zUM-ogp>cg-@9FU5q+n+p^u>w@-c3|%GrFR#07XIZBkt*|wRBFK;i(bBb;zeL$iORW z#*+JY#jh?AnSN3}23H?)h<*^}o$%UPUW@I4QMGM!mkn6`i-^dBg2~sVX{pm)v+w?K znEl2rxBz6Mb}r@R#*eU5cb);*2vZirTf0AYDe%Hs{g?6;pUjPWss2@jz}30mpo>6* zfZoaCJ2?PsahBNWVt%4_`8WW0c)RfCv&qTg2LK+zn`0QW@rTI?%=_2hh=v;f#o?31 z7xDn0wDKS|pv`ZaU$uBo6o2{z44;1J zUKjG@-2VSl{{K(;{{t!Se`dlDE*!f8$+W6jEOY#JMOnInKPi84q;QivyK^F$nPZ{- zraL~+=#D`_Tof<3cm8vgBknFG4Qte0{k#u+Y_i^CP;)UW`a32SIYR};gVh|J*h>4E zpZ)2uEUh@s+fh-%XldQX{T1-`{j-pp?CcNo^z-@NP%%X4=Nl;N=;-um7V72d3{={? z2L=U2+74!^5EqAg1lg_De)zreEjZA53ZRL%#Zc7_*b2cWJ&cnKvxv=8RO4FumxR8LJHwg)B`!McMu3!R4!w5CG6-;RV!gxE$b09+3<- zcxm>N>{wk=K+9}P+u~3S;|LtD@t*Bl0M^7kJ?i3@llv}bip-}P75UWlg!3P*@bxih znY4drAUd;cm~M6zv)WaMz8LY>xBU$7=)nx1)Vj?1%E<28d*3MdlkN#QO|S{s^j$-J zzzXesrHX_Mn#giL|d`D-(Bx{OX7!26I{& zseUQkO@~|g)FThIj{@(MJK_+WpPKYiJbnnq%N_H3!9Fr2-xSBggEBHQIvFgy>7(py zS6>3F9?xd|wzKu4ZJ}SN?A{ZtPTBYi{cPv?nhq}Pet);B=>b|Sv03J!F)2w1C8hERU~ z?5brhD5^oHQy&JxiSxSF{Uv5IqN@^9e3NM`1^-==TlaGeLC9<7m+vS-R0eTnH$hJ$+w#BRbyjD3lk0Ry!RH`Tz+cp@qKIX+Ab}0;5DhJ zC}e2~Vu%IYzRV?7<65}w!ccDc<}wDXAyXMsU|fzpWH-rFxwbHvoodT6&P_&X&JIn* zkHd+0PNk3=g1lxvnU-xDs9D?TR%)2NgPvYglDj#0VK~1^L~7pwQ{|NEFxl86Iz+(M z_NOapy4LGLj*kw@>__?B_XACB2D6eYcXy^~97QYMATec`tq$eX>y<&SpseiV5iN>~ z)kc!=pEj>D@=&UI3AT2lN8Y)nxn-3j@{Yts{VQy2{D*tg0=%9Z4k=4T%+EtHpC3`y z{Q3$*aX1fVsV|TYXQ`trM%9W5z!sWG0x*5k!25&Ogb5p#RFV&l6_k{^9?^u-IVz_} zxrcZ;Ibh0KCTJSGEUZZT(T;>oU!Z%160zn3pT5{`O$>N-D!Pt!*quCCR(Ep>KA--EP=eO_N}3gQqH?4K)!)zV1qx^Xr5 zW@+oAVE9F1f-McXUn+~&6y1(yvZPyvCy{BrM_kx}H)+4fdsnx+VPw)sUw9tXQvqTN zyKwDpr@kRIDrB}ZI&yust1E)z7<*VI*;e2{+;g04#CuP8joAy>k2s-BEmALAxb^RO znGcSSi<|hrIFhpR-}%S8^Y`YXMMPeU7RS3B?=N+!6-bE{he2UZ)};Lv{67Rs&kwlo z=y97J=^1ZK7NdqY2een8Xb*lgj6|#s%a>{ApDDBLAXmSNe-09=*HduBq4-F0(WB2*I;p;djVUX*V3PHk}&rqh;EG zm8EX1$j7T@xvYy2>rQP|$vl&zU8Vf_;TwlONr$m>!i{S@RozR)Y7Pq#T(qWja?N-VsS30&)LTg#KP6@wb8?y>;36g0bnZwr9< zWief!a9#D`ygzR5N8Se;avyij_A+s^8!as?nosrf=P|8rrueZqUIw`*uDLWa-}&pi z_>*`ch&rggn?ljh)H%A&49u()QjZz3tvY-*g~F|b^(NOn(HG{te?v!9__6SCg=cTP 
zu#~P_aMzY#o}1dpzl0j7zKVSbL6~i&wV(5ofe5O{6-I*&d&zu#CQE=-h>%2jD_J z-7GbflemKsKhrPG`1Ld&zaG)izz{M7X+ylQpGe!2ebf8nr++%``3^1vL1;q0!V0!__KDx5Wg@V1Wn6 zvbZbxysxlKsmW1m31(gdqOmWcOxM?Z=3~&kn^og3kqxVjhg5|K)w;2K zO9MYP(#qkr#lh_C!-l?%&27ehwiji!Sv3t#n~fLdn;s#Z`CnZ8=iGnDOKae7v)+H3 zS>!Rp-5)9Lg^%Y|=T+GNkN9wfW0w$g9@XNy-CiQW!Q|%d1v3 zc{l59Wim6*&#i;F3j2)67(O#Qu+TljB8S?bq9|TnZ@FYsvVcXgiATN%1X5I-fWdlrV3|HD5@N7H(LMf+uN>D3v@|j zmranx{mj+w?n+_k(QPb|SWuqOk?cO<_-a;eCO5ma&OcgikQXeL2s-#A;xzF!r3&Fb z0(wk_fU>8ZrJ92xEhGw+?9zFW9I7!DW)CXwWgHc|xkQ9~94~VV+Sq#CWamEm^-ZW> zuaABXf>4X%=so{XG?ii$`^xz9!?W()+5A+5^?3aO1K;F|CjnYqwhTZ0$@twSC^i$; z63(t;&gV=?cW2BK$hRfwDiBh|5MPl#`5|!U;tjIOVfi89v+Im&-KowJ*SFQHk6AiB zdh3<`+15?xU+aVNgnz7HYq7!G1^jhY#g#F~hr4fn9w+cME>OEKLrSX$u7J~m&zoK$ zh#f*>ZF9%#EF1PGToa2bEK0f=eAR3HKC_obXo>k4w!A5yxXeYXAWgjz%};b6)}Qa~8Iwdyi{%J@iHc0(FY5L3 zD_+a0kBhsGyEY~~8O&&bF_s{96{8x(>=CF6`%!bxIKKFNHL4#wYoRz}O_^igVz!I1gTu(Vcj z2@Xt%^I*J9GF%|o8aEu60(U4tOm0cj6y~E^4GWC=W$C$O8($L=mXiV+7bO;q#;RAA0jL@G?zLtZJk*b0(|o2jAbnAqT@I$jeIgHhy1UT*z@#5%i_DoVCUwn&SdX9E#7BnZabPa87+#e+?MmX-%Ebp z_DnWK!d916tCM2e1s-r|(LygyB`dg5{t}zo5@(L38kNfcEKE``7@_1Aq4DE8@N*VsNfKev+Y?NzGB<@P~5?zR;nf(g)%5M z{}uwFo-1`R8QCK)sV}YiE(~Pmd#`sW0fFlgcNUub&};q9pCxmnp##RB+&E3q2JUDA z#%#n(hiiDfzPJ*c1qD31Y(nEw!62Jm#kEO`3tP~($lP(<+YH$dY@?tnKsKtE}D6WUg|K-x05@&y&Nr@ z3j1**^{KfSk5Nf;(J(AjlCA(15+Z<>YM6}y6FjK}I5ku3>gpFiR9D}}K}4q=n#4BF z?{EnTeU(M)LY;<5OxoPGR^{Y7156d+vb1@p~Os?3c(Y37bY7{S<(W-$GJXm zY7MiinmC^G;p?LJCd{o>y|6jv+x^+{+2za2{mfkCfoh(uGTzGi>)~Eco%agj>8jZ7 zaIj{>&pn1^%jXdxH$+11c1`j|Y*$=U4;mlTrP$dg$ksl9h7Y~eYE!ZUg+*qBUS`~A zJ_KM`Rv9l96_*b`$(8r6)2Q#A)#Vyzl-^(|OBCUr;#csF#77Yg0Wn#$UcQ6$&=PM{@ut~t7Q3*?9 zqk(i>Kf$KW}2gvg0t)Plo12V0_ev=90JPX)RJRtVX zQ$Ixg4R%dKTmJW>=7}(_4~s*&LE}E#$te5UrY0KyF;A($|C?oE?=&n(3wZfeY+>36 zeudpnxy_4p-02kwo0;7qAs=^v;iKr8er<^axOaVSlTTW=T8*Tv+hkGCT-ZWe_#TeG z$!U;337abgRo}CE~+gr$Nphucb5U7Y%{4!h&q`o z@2xRqGAlRNqG(p9fsA=|5+cg6<-*pQ3)uYo*Ef#M%V-@o>I@t)mQ(`P`Jxp+;!-k-dQc7-th4P^Tt+@{^) 
zP1`*mS<6n#M|%D|hr0bl^c-VYcD!H=$MVIB!K{X5**X!^MMgeiUq>K7wYurHG$PEq z8Mw8Zid8fP&nVZ>HH*3XE`R(RF_!2oJR@y4qF zNia6{qsl+~|G%uMT|)pAKQ@=L3poK$&!_@Ov^NryBXR=${(q_R_m?1r;!Z})oj<7b zFxh%}CZBrWMgHR<{Vg`Ym-qQT_C?jV#1n%iA-#F|z0G)g0|*HLDfA8j-Ao|#Eln7NjJG+p( zw0rVH=OP!4VoJ%q~_+cs4dL<8PmwO z{9lFfk665}Z2tae-(l4B<9=DvO4uIZ45 zS`=O-obA-K9h{Bm_R#*%V~-yz&U?^jE~lK^Jtacx2J+CJ;cEUm?hOB&LbXfQTAeG( zVW%rQKKELdwnvVB=<2CfXaG^$Ac=o;ifw%Z5MvqGmn)|Pll2_HY(69O?&NII-Uqgb z=lCm*Q$jCI3#eE!t4sbTcl-;}m-3s{XH`#43=#ye#wpuxke=+}R5gkJPiLx_6LG{d zbQ#K;ScT;0kUD_`}gOz^>lP~A^RxDb@XJ=3_!CAkBYkL4i=#AwVilq z9w(^|7^t_Bv6bQ9-qD;*H|jh&cN!P`NiV-g5GEe6-)azFITD11T&%GUyCLH#V#5Ic zg9QyBhv!XduD(7omq`~%&$83H@eKv@H-)kJ?U<`X-MPttAn-v&Y}uj7J}CIlamB`i zWnC_Ck;Na(G{Ks83Bio(gl-FmSo}byYF+GSo!QQT zEJrWf{=DeFT3b8IGF4}1W3$@l)S2FGlucb+9=;?ZU*VpgO;dB7_5f_m=t&*5BtX5&a8XbEn>RMN92R8s!C$2W~ zbErm_AulC@$}tl2?R@|~dK~20xen>dFs*SBdonDPo}PBQzkVdo44kFm_YKv$=&Amj z3Y=VPdS{fnQ3nOXM4%%4PHZ**NYm#1=1%Vq15VBI@mtu$_5OK_4ogi9fd|t%HHf}z zWzOFyydLAdh%o__m)g78{K8PY=R zKcO}ecl4patj?g8_;Xg*`fCP*18v@yWuhTXSsT8=K31<-`;q7N!jh|;*vUQ0l_>9T zcbWH@iFA5;dghC8_AN<+LIX3tn&k_-?rYB_O8lDY0b=yNUzBaj5Ue75B#&2{vni{V zxQH7a&Nsx4{-*o8J$i>38DdCHyd5fg5N5VYNc|9fH}I%t8SLcK9%GcBW$(yP%KxgG;JKY z-7qj7yas^Ie;l_v!DR>k>-x@oPjEtOJDsGHoNcA!>C{&};li418I;gyH zoHWmajWKle`IFN0TkqarxT9s}nmjn*{F#cOyNd4OLhT$c7^@yCC;Fh*9;fl!_py9D zd5>Xxfw!sqpaav>xG1dTRENyiT*MluupU<*>>Roy3T$GF@G~jz$F^*3LhCWwE?W{knVIp!s3+gQreGB7 zzHbryO3kvrEOTxcG5ACUWu^M=X#+>JU98M;zuPpM-w}RL7*w`^DfU9zh zWSUbwb5zl1rd(4NwZy+TI@!1=XW@Gkf0fVlFIj{l+Zor#k(^~tljcZ(dp#Sd#OAyK zG4Nkp}az60x?89Fe~W6U_-}c ztqvD`$Y4T?1XessYGFg6@{Jx_{}29!?wUR6{$*J*y{=J%0kS!5!9ST@wDO-oRC+lh z@(4$>#6sO*M4A>eBFmmueVY^f=B?M9jMo;qo+7ngIrD3~Fa9xXS{&^smGV`AGwvg% zTp)FEE0M!QQp94?rIWj+Igs`RnYK(dMYWwg=^L)O?1{y}7~iKKaz)NUL=w)jtQ;eIuaO9xi%0)64OOr4;{b`ftByYUbLnp9xD`ahpG;=MW6_R-F&*^Q&}> z-kltV)(dj>!L|>+>78oOWLft;_jsZ5Q&tDbnOk8a>Ug?i zL)un1mqo=n*s$i+yWpDTH}9?_cB;=wQKaDu50xgJ%377HoJKGmr)?pZ3uE;C!} z+uI)Vr5-NpE+eNFoAK;;Q4Os7C!fs-+(~F9ao0zz9Lo1n^weW#&6=qdhzj`o17_zA 
zHV0Q-4$NNv0`)gv>j*qC`m5glKMORq13DeZBUv$SYf*QJ(g^fp31h?H8>gz=)dJRo zH9TG7%yx3!CusD_NA#dts_JB*gK?9dj!uNn$~THrOL%Zk>$6FKgW-|U=y=7aehkiM zp6@=`Crb!wDn4MB;# zX=fQ;2|xN1WrSxWHsx%uD|D@Cpk@&P6A z^#LN7Lz#Ih@6rG7&5~K~E}}8D0OM-X=~}ShuC&*sfxx znZ|8Y3B2P9639wp6kO-5`H}ZUXy-%M%&H<)1Eg|E628deh^>qb)C`FQ0*U+G4DD}! z8#Q~$BfsFrO}@$AI}>$f4huR>b@bdi3M(~!!3R|{7V)wKND~rs^0=50_a8r-h6Rrz z8r&Gx4-TfMr!yDgp*3y1`n5oKfazAExKl*b%+)$s4hc*n%&5tHq`;ilYkS3cL(_|( zZd_+wIqo%@yi)5)WPSqJPY|qXq$|sNSvg3ah~2mXJzt#D5+)t$#5h(QS^%zVl`(I9 zV>4)%-^Q<Sgx|{&P&3Z)FgSSjEm3 zjlQ9o^g|!5)|43#un`dn^d+7}G_5Wl6GRqB&*%00UFK$fg89|4={1hxrq zzIe)`TTgs>UP05;-rC#}$dgj4g%j@5jJt>0Y>(oOS7CvIW>EXl+*??e-JVh5nM3!9 zEfe0JNqmL1m=i|l$`zH)gQmF-`{Kq3?lI!Hgs8rwBH%wdT6D$m+FMjsigD$tMCT1; zg2g4WkyO8PQo472V(NGI>?0yvq{Bv}84#;{J-sl#-aic>nOKe_YdHRw2JDy$l(gUFlE+@=kkyPeGMdWU!xWm)|uDIz5 z(QbGaCAAtMoS4j8HGco=3i$JLlN2BZ6)(CZo@-k30e%DDLY_I_o?P{@Y~F-NY=p^% z))TNEg^tm3nHmkAh#IR7o%QxHLEoh>C#?)n6=@q;|HU)Vw^cPV8e@beAa}TCKjc^$ zEX*BF5nQ``k4TV{=M%vD{B~Y;4+b?IP_62TA42#Wp7%Y)5k^KEEmzqb$=df@gQJnI z^X))FRQs0zN*~B}jqmcU=?r+h&_J=FJKZ>6`R)qi;mb`yw|w2I6rMw;z+j&l?Lf^Z zVXFJvf~&{n$hVXCb&6Pox=|^1kYkHo3#o$ zXpi>2q6z0uvVaf0P5iLP@kY1Jj5|Dj|5~33*ywc;k z$M|u!ZMDfYK&wZ1wuNDN?YFl*S-SvuaL!9*?yKfNVrwb~Cub{MO5?@GDaG{q;gm;^ z%^ekSNEX)CZNq&(uQx4Gt1QENt!FzvN@Q#^n$>sNId^hO1b4yH>eFFM44e02O$^ zVoouh#HDO-@q%JUo*YLc&)|;llLZ9N9slEW33l zt^je*l9SU8q2Y=|tNCsMxxUz#&OB?=`H$kEWoEW+#{j#28Ki7uB3iA1sVhs>)C)h;MGQ*z@XtC>2t6 zz#q1rJ=o8=-cua-aPXmCD^UMw^W z*$ZDF49z|iwqGS~tqJelr-wdIJ3pNo<}#}p9*Ayu%GM?HGr``GMfCmegu7x&OO6x$ zq+#qvRHZXOEgGmgZZYo~n)kg_<5@yb@!E|Jn8}55Dw_$E^T#{FR7xkPzu?LBWE zk6yi{TdG;ZlCpYW{eWo1%6K?L+kd8`j8^F6NN&>JU#OdV+Z@GyLX6Ll9{`~5-j`=| zCl=|Y{4XH*6X2v0c*2SyA@wC+2T)(G0EI(P=_$_>0LQ7OLy** zp5>!ee{zSLnkk7B_>gVZQJG?QIt(T^7 zp4;N+YLot!baBDaB@gTy{NG^Im&m_980BcEzXiC@iEu@?)rmp!M>2Rfq&Cg=Qwe~#x9mDIm#BoqOHC8k5*EmdhxpgG zC!|b%pVGi@c-;R|DdGH%W(3ML&1hBsP8fz>*)O$Z6HDze!Qct6=j67>H>}1=zWXjs zAxq02P!#qhpg2mq zYMcl7O_9noj};VlWV<$}z67}J&c?QP$ID^zO*5Wc-Z;8I_r7&zF%Qt}%8SGKsrf=> 
z*cy%G+JRi%c{qmEXY(?n_{uYPGxMQuGDhdp1VWryQ zaYVSYxqrF#KJa5KnV0*Myj}k3Mpzbz$0qi#LU2_l6~8nT{HCJaXMZb!f3|#fK$ZVD zX}u$a1qa#rb-B!CRzi>T;tO(-Z+(Ct4+r$&!4CD3>JdjakaYuVVmIPs67}%&ZXHZWZko;UevJV zsoU0*hVLkG~X>hdfOc2eV{q_AD0^I6X@um`B z4C?eQ7n{w@p^v+gEE#S4(=rc^_IVaYi&K-lV9eloU{o16{N%2e^-xS-2#b_z%ZFcEfr4D{h7s zmCJpf157=hiT6)g0j&3kq@?9#SnYQMkEg*tYSG=cJq=!McE{_&BRbp%1 zz_nfj)|)$AckilH6r{*_VPI(NdW|DaEb-B5;1+>DtPIkHEdyW)$aL#RR+`7_#$*She9N4>hjyXAxS36bcK6mY8SVRu7ai#Ok1qN^Q5 z=7xn+DhJw2d1<)HNsilqXT+@-T$N&qz(CUc*g{yWN-(HPdgm3Gnxbz{; zBwK?e*J=ev%{Wi_%$!ppW3 zd>U@6k@q#Urc60t!6spxcq^??}L@$CH9)N*2HlaaN_zbFcY-PLBFFb|3;G{M;enOYpkAAwws^7*(#QF~7J31sYf@6d#$ zDz(~p`A19Bw~0XdN-$V22wvSTnsYGAjK_Fjg=TKK;}`W*v0XU+GT9laQZl6 z%A(8jfu8{)y`L>rQ>1kZjecC)-8PYg{6E-x%cv;3Hf&fFP(VOHK)R%)M7l##x?xD^ z?yeC5>6Y&95ExRxprmW)MsnyHYT&zk;(l(uzxDq5etqj*YktgPxMJ^p?tRvAoacs) z^BuxO{vNTzkY5;r$_T*NFI4XzAV{kK;1!xpzt6?P1~OenExsfev(rvBwK?IR{C; z-fuOerpdQGtQK2I)WUME)z;l~fmKX&*c4|t`|j4?*0^2o-O&qp?AD9yE;xRV+K`St z$=uyG5W2(K!nOq@rr=Vgqy#Zgwg>Pkidr{a*5VR|U0eE&{~P613>7^^_O4Dp>X)6Hk zK5;Y4=j%`KupK+%7nAMkRBP|{YB+<>MgCG@y*S%Hb_Z(HoBKLMB2duoa+K|p_ip3@ zEl1MI)1t3X2cSIAl%a^a`CuAttH(iYdOer*e=z;$i2q{x>x*0yfTbLtS@45S64)V) zi%xDImy&en1B4fX6?;ohAkQ(VIn;F&_)aKM1DY6-Mqa_54TiEaM;!PX=axrO~4k4})r zB4c#At$KC!O)ubQo5D`^8V&pFt;P%e5m$Me@$6O+wTs0(i@fvm*sXi!ZBrpJO<(7I z>Y~2mj^lzB4%|j2723d@+7qy9+2pg6oJxeZ;;i#oCtl2q-Rvu0Smj;a*D;r2A)6_b z&-#lwOrqOQYi||$cOuw?9S(Tf6umoO*d9Y}&|x=4A?NGVA!Yr= z|2X2J-nJS@mbD#@M=upYzHdkPWkz4|y9_yX{PHT@SnS zz;4xw+f4(N?VMt`#G>$~ZRmgnE*YtcQXFHg8X|PGtDJmipgY-CI17QzCJEC&f(jD9 zBjBo(yEVTW)SiqH@plMR(yxX$FD4U*T927*S$yZ%_8I2)u|I+A9<*zcAVy_i`vokz z4ZUhgQH_Nez=ZIcWl)2gUk4k*%Zb#E3c+0UO6 zc=ck;edbK-gEu)BxEL_E#70?XyeYO~#InBH*b4$nd9LNr@Sk|C=oa#^UqnfRXq92d zqHk`QmH*6AitL*XOFkAHeggX{tFIG8lp=^z6FkwZxBkK4#r^yR6t0|6_87Vb#{_>W z4-lPO;U>3VwLXl~%44QGD!osd!q2D`37gmzVuwG@o~M{B25fsk@maK0Pa$(8RRM9T zZ6N!@D~j1yn~;&SFeBaZC{3+_apc6~9JAX-G1}`Z3fa#zo2mqWAlwU}&h18u#Y!!X zNo_oC`oXhdJ1NST#DWPHv760{bG4IRQ&pLZ+^MX^w6@CYUcQ4(;5+}h!l@a?wezPw 
zuSr{EJigNpVKQe7igSH6eXr4sc7dT*TQO)hKqI9bC=8-#b7Kz1yXKlh#y!FF|$ zs?ES~2>I(_fwg8{Gq3W%#*=~Q%p|g23+aAA|LmW=uGO-z`UVP;ZQO62B>*MRbbPRu4Bw*gar_5-`O=b2+m2|niqkY0hyvjgeX z6$v#dCyT@>EwXVz+vz8x?7e&Pgm1|_t}i#Uf557hJWe*dX&SSw8gZHbwQ)z#P`XXaF{I&Z4Ql2U3P@LKw!zM#WFj|KJ!6nfDK z+gW@ydL^&z!utK97S?iaD{{<=2MT;bccRfNOiGH;r6!J$h$Z+E*g}b=b=XiJ;+<%R za%|gXGr_Po$`_!$4H=p2)T#4#se^g`c<>M_ZP^~OXI{pV$S)S3aLUea=zG+B!mV39 zi_)T1y|{hOsN0Z{N?M#!!M%xVIrash3hJCoKSPZE454WE@PL|FlDVle(Qjk(G7a!r zDG+{;AY+7mMO`5Bwa~u8Xta;+8!!jW7mUmNYJI~<2X5(Ji?9?LC z@%nkru?K7VZ+<6W1X#JM<~|O;E2bhjQ+9zWh1MP`olPkbZ5^)0 zteF?G51uS67CRWiPjZE@IKPY$mAIX&wGj>ro-ljdi4vTG%A@?-IhRL$bbCS;eNPy! z*Hm5(acE~}MsBVhkh>qVf|`1#KQ<;LY&Tnu*>^4sPLBp}I0R&Ya~SEf;T3(8lX81w z)!9ULrDOoFK)`D(9G_{gYIZ>nTt9A8j#ShD+F24E{^-j2L$SLE9C+#gczxe@Un0A~ zlU>E@!|<{p2?pDULNLea*+}k}{iTY3o~-B+5heaO#2g#`*Eb58rji}cD*=MHVDuWv z=qFH?Y)9;12H&_adRLu>+=<}zu7tq%?G$zsyPW(OSI^r^S$m9k7JOO#iWo8eN3QX# zu%Ap{>D-i`0r)=~CLq8%_a&)Ec9G%Qk@kRlFj9-q+CT)1w}W^cmf| z;g4rUmt$Uu)wbJ{7Vw+RVM8N2kd8HZI!9Oiq$0c6oyB(FIFAmLrnM)7+pg^90PN9+ zLg-p=__0l4@?$A3ITPo-@%{KaIM^fIsVmqHz5!qK>md`_j)TJTqMdwC2B*w&&`Lg} z$QIVi8>j^vR?RULe#Q(CEnE$rFROkrOigEmUw zNfs#Rw2Ca^r!p&u1GX+aNzfuXQs4uhGu1 z1bLo`=JSVH0EIYRXOfY=T&u_YbnD^#@Dxpqps6l)pi|b@t6fvUm`Heyg_a+E_3fiv zq!RaqNveBk_4w+6&CTR%#+f&_1`rErcaekJzHvKm1&F|PKU>EO9P{{Bdy}^#ud&mv zd00dzJCd_~kK-^!6*f)1HZwiu(PlGh3DZtSDGY(q4c`q!_7J%yzIAWBg=Zs-zI9qW zfAOmC_VDG~Z{yn^1TUCqm}2Jrix6b#cK?aQJmC)r?SopU;>7i|I2OTvq+V}^CNXPC zHdN`pU@ZNy@pufMyq06ka2A3;a``GH;lXyY^qzB(VYHI^nC8RGFV9WNll{e@I$m6q zXwSR4$+H!>V{g*vTQ{p&m}gh>1Kw4C)DWt5k#Fmswa$u#gO$jb0Ti@&!Y?_sAuVy;YSroH+ zgSYuE-*_vTlMZh(Rac)`=O^%c2@iF0IM@#ZTa9d(f7@9T?$~|v)iZlxxV6z_$Hd*f zd0msE^#uRiA-o~n+GvNRH1k$_^WY$2KYKFIt5+9CcxoGw%-zjz$L*L+gs?r_#EBfP z&wgF^-_YsJqyIpWVD!=(3MhG!6DRej80z-#m09FELNj|Q)nPeenT=nD3}bVy&E)w$ z_3mH7f>d-0D|X^CO?J2z8(8dY9W;B+Ao~5i3Qx+`cbN0q;|&CcHKh%K7~UdLc50?y z#%F#4wbt?jp5f%PsW;hBMB*H9-+b=&_sR=d=_jsz*xi7>KbF^7_@3nz2+4q+_3k^J zUR}0+Se$=FPO}zcLezNeU7hU&&L%Rv_Ns#aYZRNGwExXwxd$Bh1r*3nXH8W2e<20} 
zn5s!_Eeyy-ZjW4J26@GxD>YsWIc@kEvNOkrHKczb1P+;$ms>WjgmnKz)d0u{dVLow z9LycsoIi096+nu<4%b-Vk5KWiGDU%XvTy$gE$aZdd~@1P_ul~q0lz8Ay-K5vwbr|A{tv0|1(ts2ctN zGXO*7FDX>f?nS1irhZ=JDa(k9#}tg-BcuK_nbmaq>E9qDqQXyQ6op^NN@K~c=00!+ z*l0jaSj@RP$}}fd1@%bM3g>}o?xwQFVo^sGo2p(-z9x$X0melQO6% zsphHvbPOKCZM;EYtT)L*yHGqNd<{62wR&cJzDC)*ORYl84tY0*rM@@8RFBKZQ8jw+ z8YwOQf<7RNiIP;E-gUmdrfrb0l4a19b+^^QON!E_ccV;uYN!-%twm?l^IG)_h~ z?kS!JT~{`3D~U$ZE0++N&UUC+6Rop>1p7D{&$dh+>2xE;;+~KWxCGb@cLS2chY{Oc`}FvVmL06B8dN#vnCq%)5*qF zE?Kl-l?JaqFTIki?bs2O4S&HB;?5k~gpWt-4Sz51_4<>zMZW^{_S^3uNlKFYjOGOT zQS{%~@h^a>@x-4|vnhF=HP&|_{ibvDscf_ix0q#t1if8S5LW`NHG-b{2N8&D@ceaR z%Z~t1AZ@c{VZEj@2UGv3s)DJ~w|?BZ6+FgyH^4inE*l@J&XHE{OTnkzlK52TE(6U}geSn(xXAgBsich6$yr9p!%-=s ztt(EA+RL z$yby{YUvq6%FU9-kE8+$uNO00c~C^M-&ag)d@^u)|BAIN<+;W_+e3NScHB2}T$=G? zqFk12I?pJ@u%vHu?MZn|z%MJ9&L!4teCmi-<9(pMpG>C5DU+kx~az zv@pS1rW1DUKY9Qdp`fc%rp1qF$FJ?ASJiEJ<ii6(FB1 zROrhzqXR>gjVkrGO-B=l&`v~dM=jKe8f(fYg=T`Aukm$Hs-7+!CX!k@wvw`pE7qlc zv?W$h`{_92LQ8l4n$g!ZD?JV#5?t;M_8%-NIOVd@XF#V%&BI&C*#{NjmGnvx3;6WNA{x_=g z+jK@O#^%ck?5}0GVsbfEtR&D)B!!x^CH_L2{o-Z3LVH>vI+0;u8m`D(u$ju-CC_Bp zP00xf^+J|AqpIytrATK3$seYb1(<$(`eugdZg|n_98#J!#jp3LC6#Tpa3s{9K9Tee$yHSHvDq6ylcoD%YX0iw z`qBYb@$PLVp5bi;WJ=MHg9xp>=&NSE8VTAPNz7{#kK~AzK1(*FNZdj;!v?J#x?O`{ z{{F1jt3#4vZk}TIpNszfc_N=9g{A^|Gb>nHGuQRPs$V*90btSC0NhWZKNWgN=*`G`d$ zzwpMhDql+c(xNd%JX@w=Q&3cxlrjg<=;P{qJi@L&|Km*(C38NYsiY(KLgZ4Y!<@_G z>?Mt^612tZ;H^@~*6{~PIJx$`&UaFo;Ot<(ep0XstTQ1c`1831O$St#T2xK`35_R8 zYCUF*+Bt6K3(XN8iAYUEn*{y5hY0^cBLzD8apiXE+Ct+(xI=punJe2Oq5e569E(g~ z6~LKCdyL!0Aj`b_+lP`G$xtFabmsbHKJZ@H;hm~h@#x*F%Hr)L+&D!BIAy|eCHa<2 zW18*ZA0Y>!_>f+$&MU2o;-QV#pIY~59qD=ETNO>XG#(m2t7T_8Lht#3<{iOx*2tu1 zZwb>u5B4>K#|g81P^&**d^4bU6O$#xZaOXdG|wkfP~bTdF6CdJfHK)%Gs8B|9iz=A z`QF>q2LF@Z6>V--i9pq-B=P$=e&G{PhmW2w7O#YA^2ec$=C1I~8PjwE{Culsxww{9 zvuPeb*zieqFTqPbyWxa;vP+asQ^t3DJI-STvTBcoofy2yJt+Z9UGS8l7s||xF5>&t z4aYE1)HxZy(OiR_r$^tM_RVJY?ac-YXgRWOD1aP%?S>?bxS+93c7FW<>$@y{j6{D@ zrQ*MF#hx3llbbV7@!us)`T9|4jFp`Nl6~v@FIi*A`}R{N($&Q(QM-)rvs 
z#67C6K*$>$p^t6GpfQOHDMWVMyO!b5 zx33|j;nx)YPr1`EnY5UDj={-qr>gYF&f(MbPHJ5F-&f-Ap~e2~)0r-cSr?jwqEyr2 zs?uolNeJetWB9_pg|43!my?1BM*sbjac!qC$S}{zqHVv|PK03Ss;wYett?Ep=FxOn z8Y(s)PNsmz5w=6m5Lpv=RjA2!7e9puPCxH=T}gL<@F+Gq_8ITI@6DR!b0u3><+`fx zv6?<(_?2mYkU}v#+#rPX{bC4NG{j~yXAY@0F)DQEnr@!fDMK*j^3sBLYdP?3-P|Y7 zQR#FTtz@NRyixSxQP`?=*){h5AA{rBb0v!AW-2-p_u+T`sOqlxU{mOo-hoc|e^jEJW z#0Y=6Yrl)$Z}g8Br20$7|Mle&ttb^pzLVsM0uPl1?arO=r)NBYjV4F@s&PwK>N6py z%uVqUK|itz4L%|7(6QHIl37zOeoM|Vk z^iBF2tmX)YJC2FnZI_}A!IvmN!JU`ipw=ZdIrr!>$%Dp+croasc!_xCok@N*ND#c7 zrqRS9m;L);@DBN$JK%*aL;e$Ja_$8ZZJ6x%N)lJDqE_f5{NlN8>3)Met@~~unI;3OHN^XEF!JF!BN2?a!M{gvO81inSwO|5omcleW$H+kfB1btt5H@Am3`CUmE zTbx2$uF_@fh9dsnF*{n z5{hrQ)Am(DuF#e&H@R}W;(W~K^bD1p=MfqjnkBT1OE`-i1Bh5m%hUV72=LA{194}} z7K*F5_C`-c@6;bY79XvE5PVjM6L;5qD>d(P7ubuBbUD*Q zVKPa`F;F6MW5j=TE`q*vbBKrD=?8B_!FcI0M0yul@4w`aU{~lY0jcU6leHl>NItoh zJ6tDc``;jenQzFl!Do<9EsFl-KO6XNyZ=1Ho)F$Yzqba`BaSqoNo191GspYL8_oMh z&A{#MaVV&qwN5JL4Os$xRbBTgQH~N~Fif{f7h_H`2$UGkVMpG!iOXcky5n$35Di`m zu$!t~oqvv|`Ft6RgYE;7p=vBe{sDvPc2;bCpP z5Z~fvyLx5gZ<*$}xtGGA;!;;zTjSQBl(Q?2Q@zGU5qNs)8umr^$;TdyziALaU;_H6 zo+zHHD=L{eK*oYCXF}BDh~2YOIK={SSB|#`nA?A19*Gmo5V=Sj=)NGm)Kvlh2z0c) zWh}|k&#uyby+G_VrL7xSTfQLa-Y^_aWWP*Y!2mk$O&e3jA=Vb=nc6HP@r>F0!?;M4 z5N$B1C@TfDSGJkzTc+dpQi!;k6KwYQH_B8_JzSH@5O?Bot6gkbKZJBi-3`jji;2dC zum*?teAe(C3f4*GrNZuEO!guI!k+i*hSU0OK$QZg$3eS=6J`nN*o#pi0YVy&o(&r~Glcw{b{~GnXl!*4cY8CWs7-L9zfJ59#Gk z!cQr_MVcPn7ICy7HO=sY&hBs%Xkz0YPkk|HDy>kc75vBjvqkY6;-Cyv)`UFs1KF&x z2w^sHgNT(ffiBu9ltzg$H#4`S2xJp{E|}zl$l_0eP7ZJInglZ5XR;{8g?@G`V9~)V z`=EhlZ|ZU&HM>(PqkC4=$%*c?6jkVy^v9t}832;_W#ReRH+*9$Y5gfYp##UtAR897 z4{UdQQ$tm)5=B1aEy-z`gHe6=7ARL{EXFnUwW<|GA?CGWW4dGy9pQn<4uSSJFZ^N=z?N8DcjFA(h40mlR*4smWjC)>={c+;iIq+h?jZq+p(qzV@#_X=4 zm~!~f2fagnL#u2OT*BpYy}?(KjSx7V&4`|F$X~%1EBzrc0zuzaK`vQ5vFpK2$%TjC z@a$SU>EEKOPWmwzPg;naSsVxbLzHsV?{gW1zCcPBP@>)p_nha&<0E4g6+6L`jQzlj zi_cVy`trCp`}OVpgsChBIGK#T5Rp~CVPG4>m48}*8ShOq!M~IYYm?`yHj$tk!J*>9NVB)=K0_Y3()*sfdp8~Q_@0Mf8Fe9MN`@8BY0oR 
zXPHw1nNGcAOBHO@j8=({nwX=>$#we7c|T;T(;-?iHF$p@8wm2Kac#n?kb4apnyf*k5%=~Z zg(M_f^s<<|C#{7}eZdKen?~7x4a9eJ&DcI8rq zO(b{7o0@V3zczS9_xmuAaBChlEwJ)N9CvvpM(EG+XwHaA@*9*9!Adob@}*k)=Gq2t zG>&qeJ^xsb=T8#vO@&HzeV709hLi!GcsWm^cvKpse-{bB3j;0+wp14!_|4e=*V2UY zAfbFm@8z`ZA$*WCzJypO+1URoO9oMiC7zU zJa~Zpb_9(+$EMM~$#Y=c)9+Z|m(P)S+{@bL%eq7K<}!uL1cByc~q5)JfJjANk915A-IB1Rze|PKJdsi%-E{X_ND$haa$m7xm`+svV0e4ySNdPgGL@RtZ?EQOR-}4_mVhQ6BOJD^4>lhIN z?)1T*rO;DH9qBS^u9t2E{Y8t7b7ULmO-$AA!^aH?Z{Bw#F@JUH!R{uq$PQEs!R~5- z*nBd%c7IAhkWxGM)Azbz?{-6x)E#&K^5DUAtyf*C$YGP%Gq(+$rlVP#moD9#MS3u9 zJ9)6j65No}a^%ar+Nb$u7ebdTgW_nMf*pg3yyJ{>0~xR6Tzta5(c}(+{bflm`P4S|JIr!2m>(eTTM_tr|5WehS4qhkBO@ptVBt&i$xH+@>wzLw_Pj#{JGnm37@xNE(ll` zALg;2HnQikoMvDs2imJ`c)xb?S2ibQGu{L5rEys<=ArCBt0^XYl8vbV-Tu?X;*6kLR-rm!-;n02=UZ@5su=G4Rxt&*Cau#cFq&r*d~@4+XJD4mIeV8U zLYSwt)c#D^__=}4IxWY?tcPT#dFZ-psvbYNY&6rIx2Y#88Cv|VDS|(KR6lv}P9y1$ z(FF7&lWw!beT3r4p0!d@)nV+)o*5}U;{G66MJC*{))!oC`A|FX_xfTbj<`C?k{{lFyq(Vew~*|?FELAC?1ku`B}fnCU*@884D^JI7Zb+=m@uwy zwRKI$?KfBHwJiO^yzV1Lcql-j=ErJ9qIyb7B5Ta9TN5}j!WZ`@`zINDvR`&Kt?#(M z(7e)Z&>eM+YC7bOs9Xrf=*PS{PoQ4&0UxevZj5xm@eE}uyGrp3Vh)N3=n z_p92HU(btfn%|YSDDQplhpM_*NgZo(4EH&+eq?DAUM0saqB9Q01gjxi-OjT8bU!~YmYSf?!DpB=1$YiX|d_#qbO%yIhpJBx8zT9$j~EH; zWL@3346o({dJ~P7*=;91_gL_c(4p|&i{hbSVHwocamyZ*9Tu#Vl)nO6y#i3Mj$JPA zL#mKo@*)oTg*p2cB(K_sn@)ib*G}t(xF^Rvisiczc=XyDOnW6JF@<1=dvx=mZ-(1` zb1j9ngH9lNWXG=RcUI-los6-uF{k~@O_NmwcAm^LSQ0m0m%fgq5Z)hS_q=~zAYafZ zLS|*Bgl3o%=^?tzt#l{Hpps-C%XWfvF(Q&pb> ztM1f~N{pF6V{QBo7CgTvy77`)0u_C9*odr#<=xcWcz6A$G|I{JD_Oq4?v*kU2 z{w--D=Hq$U#=frax;sIHz!G>a^Vue~q5}0*4=K&X1K454@4hN!&^v8$n0vE~#8z)? zWUV0`-L&K0w!%wmQNL-L-WM??Egl_<(IJa7$i@8;pA*Mxd`g)a=P{B-&Fxy=UT;9n z(zK%ev`LqWNul_*J4TWP75u(U$JvT_am z?r2zBPG3DQE=XZ2M}gfI=K>v8Y{N&C89vkf=25;x9s^;9r9bl9DAVL! 
zOX;OnZ`BGYGpnya*TVn8|Di0+I?~2o14U1x2O>Jz?ay=_7)=JIW$iHjJi{*q7tycv z#365TFxDXB?)lw9)W~`##D~Gi5*^Xc3&ovnwb>Pms;`)AKB$Z`e~gmu|E}0kSFam) zSjxHpfb1ecPa9l)(jWMcG21uljIh|;f@+(BTJ04qzFfxrU2}^Hq7^jo;}BiPR68cL z(48V$=cHYGq9y`bwzIsioo`y(pR^JP1>A6DOT8dYzFv14ER^JA-70sW0m$>6K>4Ps zcD&`@-)!leaGVi4&}tm#OO*ny+|k)7eh~aCWBNc6dm@WOeiW^$L6ja1u~qHoKOzO9 z_Y)N;sq+2wT1K{-))h1tJfjLfuZ60|h;!l+Ezo_pQW1L;*v9@YQ#sstj_Xxn{ii(U0{sio|GEualv=&u zNz}a$jlT^GF!pl4YLlP((dE`FxICQnj{r^`RD5@quvir-~7_-Vnt*f#8!6w zuKoJ#{4iBz-+>>gvlZesA}d8F*`-&`%NIJervKl-5zBPHE|8HXMYGK z@Z!*uRPK1_cnILh*6sw_Z!SBvI-6|2c59uY;^z^)TfNY{a2tT55}ZrDJe` zF-WU8LeqR5Fq#2&W%$pZPrv_IJ2C=DX^#96t9>j?a}B#q4Z4fnzq&n@sL)jC z;AM}DtdmJ=R89dbxC;Fm2`r-j)&ginwfE=}@m*+fOR$=)@jS{49k&BQ`9O!?$oKEx zhnlU%sZ<;Ik@_Bfz1S&dqv6(qEhnhdD6mUkimhKq92|$VtXn4d66nj%en4k|abW_eTo+HJ}Y&_Xp{ zfd`AHn-7t8L8r**lVHYgu9$*D3)BQc_*diH^(=O3bCbgL)n&G5kSxcjn$R6npxu+O*$7;smSY(Q&Dq>=6r$?wM~ESy=+#RW_W+C@OYujc%^Osx5k; z&eMP=LL<>}y+4OuhsM+Wko>|yx%K*3$M+85aRH!CZ;}>0hlDKSuMdq4j~7D5&8Pla z3$BcJ0v;^KXG+1;c%(#UwfpLguM@(tCxh>5fBwi?1+tgH5efcO<2_15{Zm`*_`#))U(oTlv^4&dtk?yb=|pQ8+Y;-MMW(CP3wg=@z#L zFln;hD^wEhiXNvP%x@#P^trMLMbq_G`GzmaoO^Q*z1y^FmI>90#R5@z&n8q<-Jb!J z<}kat#<$lELO7-OuLZVp0%Nlcjr4dbu^_icfGc;nLQf(GB>$NZemZ6q`JgXYyvz4E z?Z<%Z-PN2!N1j;V2j10C;fw8JK4T;SOvDvg@V9T`wu_{;WlIhnRp!F{)j;YNXrwhi z@ZDPA)G|@4C^hv=;(`_91<>@2hhsB8nxELpI8m3W>gx_xrPUYq2cxx;a^DpzodGsl z+>x!AfdOk8mreA^Q0qN_$O`ldk1CYMyE_T*&I4Mg8HWiE4#>?3u7=XYrKj6uRiBq8 zjR3>T_c}2z>X@G=P4Kwdwpjj-T}(oE#mrn@1WI&qcCPXB`SEf(uDj%9thqw3c}$z; zb#`#eU0ESKgyMD++&#Kcu-2do0ui%z_%bIk>lk;J!Ot>xfWB7qy0+mcNh8$YYSR$kV)n641X{OlM*J{6de3nTf>>K2ZdM>S^5_d;7 z5`J|FZ3ubHH|kqeKlrP2Tqe@kFrzXYaR%eLAW9#)e~SjcLB$sSDu{t__zKXZ*M){o zrbZ~3PdN&+mss^%Qih=^?8&JXMcJnl@;rl1y9 zWmMDir|>;|TW>ov_DBq|$|p8&dWKP+Sl@9nEVA^-6kqXIYqSm#{2-uP5}T|3Y<0)2 zb>HDKFK-kAaaqMgtS|H{umWqm1Zxb>z?Sjq$3gRXqmX^L7=GEzTAkZC-{vfEo z$+7$imclOqQ`NFmh|7{R(k&chtf>f32c%>LFu7Cb`re?Vg*)IIrk#dFBj6(UfOa84 z$6Mo@GA<(X^9mns>h5l@O;T=hjscoUzt1c>w$LfV#2&9+__`lfMRHu6)#8${^f}jX 
zJwTlByjYP|X{J7u<_++9p9~W3!?B_ssu?y%1%gR5sCb z(eMP=``GTS-$P{}>Ni)g`!o_~a-aQi$VL1swCPR;O%|G*9t`orNZ0nQg)AV%2zUoP zt)HzuCBWwZQH^5)@YEde*`@Kb66l0NyiPWVM>%}XwThJVJBl^c_YR(YLf7|P@d66S z%2vJxsQNsc0KKy)bV1~BfI}%2mdarfTH}XIpT+z1;4{a}JjRW5WtR1}$`Qeqf0>Iv zKs6gk_F!>noV(FfZEH84Pps8EetTjDss>-IJOtPY&$oP;F+MIf0zHt|j*k1;#5b#C zgVV#2@jvrh;AgUZeu=h2UM3r(BzW}H`&@8jNqV7$sA(gu{Bbf{M+U%(@_PX2!bDM# zh$y*I^iw{U4Wk~cY{^#y1a4z6qrQ)g@+7m>RWa&sOTPxHr`((jFBv=85p+r|q*S^t z2RsZ@_6Cr7XNv&c)3%~av)yeTVxu+94!ZP?JY~M$-}$Qe%ki%Q3>2c?D}aoCXi0<# zlpREYiZxQscHC+NBeEZqr<};ROv%IeN+90dk;8{T9gMVLQA)z{uVafA452KcGYaNHVAU zk&4^ggb33%eVmk3NmR`ytD^2wpT#b3QAcFOiEHB~fJ!}7#N8 z%1nk3QY+OdGqYk@mc|#j(A8pbPQCEp;32h5b6SWvCaGtJo6e5YPzFU5#CBtV&D0me zRQ~QGsF)hXI91K)`o#s*xiX2{ZSD4K%CNIm%Mv*TT?K)&zFnOCxI%U3lC5I^-YReN zcD-10!BM=v9?nXYb%LJJ7z0*#(`rR4OMKIC>2sdksyuaXhtynMO2E&ryaj&epO`Cw~ zhDr^b4DQ(R=qvbt}s7mS|9K?XJmM0Ci?zc$m(RvU~3A&-^F%mpwGBYF{|Ul27-4?sDh_3x$ zz*$~qi10&-xtU1Xczb#LXve(Fq2NjI`O)$Zqmlj2js zJc;2vz~1wQ@?&Cc^sYTDsX`0}6VXbRWy$y3YT_)Nt3fxeEVd6OmftQ+HaMIbJ$pBK zf#7+q*hk&2{tPw`Gzs*fNvbTOKFeR{QmXu%WTss$;I?BrZwU0JveI*f-UsT9pqBI1 z6-KtuCDEc-_a4_i+G8Uofq0j%E8I;CYB!#%z%J^o)Ly3_89t021#>Jv2Z7Bp6NHN| zS6Szal?-)~rx!yN{Rla~42?=-AGW4Ebfs`5wqq7Uq0#$zE*zpcep#BR$ug*^THbo= zn18)nCZkban&|4g<7Y(MrfGA1GOYBfaUx11Ur+ZSbMbLTf`1lgueOoulrH|(jx)(Y z0irbBZrO9T-+L5zFoaIX+H~p5aC{34wPux2=3-undsP1V6w1yr)jTr`-t0l#$?0Uzy>1==;fzI~DT<9c8L!<%qnNvNBkC z5>%8i!04K*81FUzj<*@zNC{Ii_8Q|Zb|!;Q%{0UyX#&+#?P^70OGAfRTj8K073s)q?h zsIYV^7PEl|Qu-A*m6!a}Y2p&0lv@}+maPF()?BJglSmI;dG>4!ytI)UN;hBlPn1p7 zynPLRM^PRbj-Od46%Q>yayw1cwh69uo@AeYRw)S6o~>gT&e-TDR@Q%5F{$E5@IvD5 z!X)<-aoM}*k9mVas+l9#g>A#R>lVYeeV<#2$+RTft5$q3R(c+TS%BDyUY))6(h<-f zFjeOJ)vbmU(hV7i%%78rRE#+qj5hcf2i0 z{UbUBZ_S2hN6iYT1QTql#G+CUj!G=GspqriI%9 z(9a4uLR#zv_F+JE6UjJKsxh|7xUmole+C>IhwgL-MS=hyg92kQxSM`1>N8c09FT*n z4WT|y`5H#@gpevqixfsfpUoj%zqReU$WiBb_w296aH*RM<_No{bMb@HChwC$9{Jc* zpr*yw-6Ie=LGIq|`P=~=|89XSTX^E7BpUfiSVt(j93;rl@vGzq2x^zgq@mwgVTb%O z;7y}$z`VBV9=W-(4regK(yO@Y?hEo=bSvLej 
zA1U~{WeyqL9{2+Zok5E?{bkU*{21Yv_thKiz2OTUfs@b6d%kf^=~)PiTnSFttc*(g z*~CHodE4RhG}(q;JW4`moF~FN#MAWIzTy!*Lfbb04b`m*Xaa#YqEj!ube#>o8@EZ1mnQr&lnCADT4q z)ROEgTK0M2gn;!ky^eyiuIDcqR4j=gY1W3h2%UBHSZhDIT^%?L8JBT|{X?~+f=LE4 z>E}qqvq|B~KwxJsI7lunJ1vdiX$7rbvkbhx9X(drMpH*8sG&N^e!ar@mEPF7hPp%_ zb_JeQWPq8m7Xf~znh|((NG{!{zD3i3`Jhf?l7GiSa!NDA2b`GOQWVlZe#LK&xGAZ@ zNd5*`-`Yzgh{p#}1;a|4glVS;Oudk~PbL7oeF=ffyqkf05;pO{pOs|_F>>yu@V}TC zG)%OPd!fefWakxTb2mtWMkVyLkf+9%nOeQShkAFSmY5~@I-aD`S4Xm>q0rFXgs2=^ zjSi6kezx>C>QpW_sF1FtP<)(}C}|0fo42cUHnS2Ldsf+S`mDc>K`uMMF8S90MAc~<<-Xp|V+klal*BWQdE0o0<9dpz*Ij$`g!$=&{(l z3dMi?rpyh%9?aId#{T->zrOtc^P(EUppwS-uT9cfFk*W&nidq{@+Zg; z3Acm+z$}I1n)?0ovR61cPgw9+34Y5G{413G|NTWW3~JjiK-2b(H0xA7~E6QPV6 zDr2NTtF>kK)@4_MK0_oyawQE4)}Fs3vXte})P11ljRLVrNpIUdH#hb__VoJ60Y=(@ z70G?>=d`r_7SwoNUab<5EkA<)CUx(S%SZ#P*(B6d$5hnSDZ!T-Sga2v?)zlFBMc6{ zJnTDQznW2I@B2MStr7Jr`I;6RYpRD+POKdrf_zSk!IRuk^>G=SChMl~j|eHI0S^fxnh~LLTF&Ri@9U`b}))Wwsn`_ z^t=cFCk8w{4S%+P({|tGufbZQAvrW}$WF~Wzcr!7Eg?fz9e=g#g#S$^_B9SjfZVH7 zXD8;#6}jhvVgd9=xOOzrUvmJ76uoq1V34qx@%gqLFRJs17dIhFz+7ji#&(b{XTSN% z;D5(itf=H8Vo3V;i0(&;z+O(Fby6kI9qv>M!)l=7aiU~^6W zb2~Hqf*?BQn@9rr8%JcI-d|Y7)tQ_r->$ps_B^ylw<1l{hySS90B5Cb3?O%ENgUtdvVa&`cf5H%d&4^p!1;` z{jZp7uFl(PAJ-*{>b4ub>Cc`Eg6ApuMlcL)$5Sa5gOg)dw}kl+&BgS)%mnZ3{1XT$$=-~MsOePKWY ztLN;ls;;W8uRg@#SH#}zZ=PQE2V|vRqGekq`bD$ux2{bS>RaFdcUR9Ttd9PIZW9xA_UUte;d=Y%K$+qE4!`{?8+uC+ozn;!Gj0 zc(ITxDh?)owH)@CS?R40Xx-hgS*R~(XHcc<2cX0skIbgm-wDD{ZHc;XbuDs87006l z5bAgx^W=IQM6>_}T=QSq=M#Ore-eb2Tsw5uEZl!)GV=8E@qzV9_#UNU**G4p)2v5P zeCf8zH~$lv8;?!}(u%~?6y#_z6VKZed3bk%S`PFI^1{sNFNO43sUF^VCUT($=UBEh zi~TMzPCc&~-cC>D*qhWJDmGQ#9G7Bb4FfU;i_*O zCKfSsiC4SrDCpUM6k+$f&6@QCVz@itgLzkAZVWF!(X_w2UmTdR>Ws{h41MKl^Gkta z<+rvjKEAcR7x!&DS!d?OdRj2^Rq}hA!MU2tUX%}euNn_#6(*ZK5R5!H@oCl5TyNfZ zc>;|g57vs9(hY-g;?IxWlU+JZIh;<^?ryB9&KKO`HpMy)OIsM)m6n-)P8okY!sXFD zAbH)kYcgLD2G+<+%|UYc)jDJ3D@<<9Y}$NXYe#flW^jtPr0TuNZ8}#TLfdOvN6~a^WAKkN>buaxG4i~7lj?PdO|MbSolnp7 zzZ(AZKPD+Q-Rx=PR#yHcHC7e(>w##gS`6+om$}cNqm_o7{%v9T0r~7=SLAOD3kqLi 
zKXu`gqv69q->H)-n)O|6nQ3~gsO%KV^htmo-IBbLA;>JEBb`X2sya*t&EpsSpmz&w2Ea&;Q zW5>aQcko^R8||C7O62OM-d}Y>gJ!Zn>YLBjcGnXgUI4B8u8-NwCrt+8=r4N-c$OF& zz^8cSzuDL)0A0I=nLFsKWV)CcxxC#E;OvB`T7kJv$v-Ln{6^T$>LSCU2PJXZ8;uYg zmHGBRMKdImy$ai3oX#?{B*AuQv!EPL@FfxxxE{8qF#J)Rj6s(_kwm@j=$xDsUU8$A zeft6BTWIAICxK5$wv2j8@N}3jM&)5eL`3oxsG8b$wnriQd+;TjeFO72ij3@3xB!?d zKZ;1RrMdV;{~*)nYQlK?TjSd$h@Lc$A)L|R6&9J;%lEekLCO$?7VX?(v3}%Y#%+1u zYt+8cU87XbBlO0rB|FkJ6y|N-mivV;Z;?Ki-|^hq3+j^C-^AQ!jLI36Z<*yiYI*Lj8qNPl^UZR z9+#A>iT7!~xFmjLibh29A`-{Alpli=DBjq>mkaXxNFxL-Zhl-`>-q&obw9dUfvS9h zcsdUHg~uW>hfUWf?z@#rf$_EsTdU(W1^{2@o92(mjjbMw4^$3czh<8V?kKN&H@f9t za7QO5>Yd$aGAIku?J_A3)KCVSHD*gPa-(AI)h!T9QQNl+Pu z2e1+nB(g+e!2nA)&G=7O%J3m?9=CLxWl@g(*-D!;wXB2r(M^G@Vmjqta(Sud?fLZ! z^UiZ)7b_#L1q@Fz%gVZnk((r8@maBJCZbmiFir9Ts~~%MiPc|~N+s97VE|2lGFiuJ zFA_tRyzD^*&(=4mvKs2L&`ri5?DEsJa_(avWQdTlS02mrfH$60to_3}rJjt8Uis_UKY zzd)hhO#cUMgHoL4`J{)O;`g3t=jeAT-d{yYjBJ%gm{Sz^#WcuWFe!R#C>5(A4XB5FSxT6;z=^@CwG<&)#bWpNaMQ zE3x+^ft2s{^>v!zMGF#Rj7cr~N303lkhi0!k)=&{jzf4L$m#IUw8f3AtHo#N>EPEX zZ)R$^uO<~*k#Th99M^f`ITjsX$euJ$Nvtlc(phg#(%tSs&-qTMQ{7Jll5>A33y&)d zN+E*9;BD#9jqnoKC@5YRO%!)jMrmU(JG#qqZVN(c!g%jkv^^JJudJ^3s*k&4&WZtT zR)ZC%Z`STBo~UTsH9fzMs9oEl>pNo`Y&m!*O=J`4Ra-O9+j_&F4nRdP2vi#!dG>0} zGS#fMncDw_&zVH}D>*WHz|~yh)9g?v1h%1d(Rn$R|YcOTHVs{CHD96|2H0rfKyG zi@^Ntf)NxKi$SdIb!JlWsE{WPT`W!SWNIu2)w#7A@~6pb5V|>o!)wNKhIi&)HtW^4OQ^eO?op{}XlO8?)hN~GG?$#eAR^pL7#|kr5yBH_u21~c zp5hsacS;m&zj=C&0PQJ=x1iK0FrTUBS<+6$3spXU6Y8LqZJG8fUsuR{vKGC(c(bZg z{`JpaYkB&}nw72)bBZkQz|N2+S`!X1azEM{UVc6|O-{*yDzuhcNiE>Nu5-FS`p`ZV zXq9XEtiPz2rBdAh9oiZ-{Gwqd6qnA>3O$I3T>L~s2Lo5L{OdHgpZNSy&QxSaY9;AQ zaZ|E77(|WKAXqh|EeSa%Hs-9vCluP4ed2z_Oa5BPF zcJ#sotBMo1dh}NKlWRwe3W0OS5zP@6<5-1XQbhHhg3YEDg+Q>hU<4em?Myq7xf8yV zPU$ldurpj^g(Zu{GOoBf7ic_riwR-X!(BI^Cy88^;kkL-XW{GVAF2%e9UNT=Is>8& zjuzI(p%aO_s^cGJ^KrFZvFp>Q{}l==vcf`tu(CJo#~qLx23q{8QSu2A1+j(Ue}vzT zXx>sHH^0#gNurqW{*n4zWesMj$L$RwJ`rMrn0d0c%`$+ZEH6NFXj_~PYVWU%sM_+F z6!`)^l6aE!gYs$k%+<@<_E448LZ*V=%3?n3UQo|Cr?}P`%TB{R%&2ZS7j8 
z6`OehEqXdoxaf*Ufb^g6Y{v|?f~62y%dU+VUdGmc)+rEvG>;!sIBa_MQe2adaI;7U ziC9>?$5lS*$CMEkViIz|Na`CLS;#&PU7q>A9`1xj4d%DqmN$m0W_llQIrFWO>~Q7B zJ)i$9X^9y5IO|=BG9jH4VvdxBqiDL25FcbvUWK#0lzxTXB&)M~L~e#3`fE$s@p||Q zb(d?LSi3_JPq6z4GsL5m`?!jv zJIj3)E=a1keTTv3MK)cPZooyEX~{H|bVARueJXogq$<}DStzIoS73uWek;)?yv#7A zI<-emMM=W^aUwS;Jm|1Vc53U)IYLL)pk29&C@T9lI$L-VO@H}Uyt+3}MJyY8OIM4r zK^5X?vF1hr}JX=BbaemhQ5Ux`eX?n^$XIOD}p$9(Y% zY^T|Fkj@v}3?TNwCk*sa#?8ndfYHNNpo>>!-Yr>T&}v6iuYFpuok!}j+fasUNq5Oj zky*mt6sjsOMq0DUuYX!4py`?8#S{s037{kXICUIRe?>^Xqpn^N8Z=*CL?OfA%FBRl zW{0Oq6KgQdYEdK~%+vkVeVXx%R)AvnT`AlYnLKYHt{f4xv%5hXg1(Fp3)EA%XcQeI(4$WD7 z7cECueV4z;8j;Z}8zEvw53_JB+v7zJ-r3<~{%;P!+Zp}2oWUWu8Vp@lTr9|aZxb2= z;x@E~(aBtC=E_22md00!J=H7c0d$6sX)tNA#Rnkh3kOTiz zNMO72x5R>sIxf4SRsgMK7Og7}ifPuTtYWd-Cp6o9FhA7fA)T~~`FlmQzZALQrFqKe zp!246N6+b?v)F+}g&$!|p3c+^NOo5T#c&iF%0Xfmf!+j^U3T=e4QNW`an#DP0nasi zVGG5UJ?Z2kJf@Ya7tza;KJ`qkYMSpbF48n+Qq;^K<*DHks7k;pajwWrS4NJdjkP%F zcwtiqUmvHs9&*`nAuWna`4>_1d4^WxfI<8V2p&xF{|cl?;Q(8Xt&#mUlIf1Lx-YSt zBr8!<{5uK9Rw0JMscPQ{e3I}eNbU54Je!H|T%=BW)0u(V{S;>1u4vm-dX4ZXc59}W zb}KEVYK>O$#{mNLG#ye4Xff4XQzPkM;jDwwXPh8>Qx^vR9EW1t;R z1$HoZVJ^LqvC!Zf_PbPGe_ zcD}(~%8`oj!a1(&O{?RhTMztmnAwm6I=exn4fgxh@(JdREBXr}{N#FG*$VSLO>(On zsm#S@(rteB{Q}FBK)O;YDE1$Wc;lMyZ%{asaRtg4nU6SmM6p=R6V_B%4#yi(*b^mk zQ{g%5lF(n@g7hyPt)QATV%TOeDT*(jy{O@E%wJY9%Z^cm!wHlfJxz{{8CB;~&xT3w ziS}85PZ0uuc!{MRD&(g4(`G~ZFY5Efw+|sn4Q5GSE!qDhH09NsF4zXf3$bB5tN@U5 z4Htf0gOg%Wxn0>mFQgR4&w!cPfl8w47Jr)MEkCq29lGM6&=w>sBk!?NL*U(rLnaae zblas@#;!+Ensc$>yiXsPo`@(jj8&CCmf&_MCBq)@4`@@UL5i$)N4Os7py*Mo!G9yZ z%;r9342HhM$^=h21o_EdpCHM}6{&Y_(l(Hw5Ht+d(^@uHD9ISAg)cudW@3#_tHiu| zR8)OP3aqxt5DukxVOD&m3ZnB4o}xI!vf@lebC5;P@{|0b>S#Wb?Q{yViV>Bv}K_W4m8&#*H(}4xfoD%L<8j!Z~1DeO5ff)Z2o z=nLkfU+ViQA%N1Nj3#Mq8VX<;w$nfsbbrqWMXF)~lNnxdF7O4{2cTs}$I(pqFl!h1d)}&JAq#KG~CmZ3# zr<>6>D+nd^s#V#ZwimVJwAUYW&1Vn9>cNBfWa&#Q26~hYsw0>q+|TX7N`cpM4F0jI z!K1Z^^{`=Z1cidie01_9r>Y@JN^lvg?~2By$WjfXRI$Gu@p%bPTMPJ*_Rrt`!#3>qa5{7m1__zFgoBqF#9WgO~qM%Nm>bo 
zC`>t$De1s6{L7{nDoN-6v!Z`pcu)(U5{5q!(w@2!+A;0066Sy*JvY$6i6SGVdSjgP zbtp{ai@*6c>0U5WhH00DtIy7xL>iw?hEoGGbOkkr0$P)fn9=QbUL7fWBSIqW z(TL5nu;oWV(07^MDiwOz{)mDi>4-hD%<&{Rk^z?j-vj7abV@^$AOv%UReObgO6Njj zd^%isPd<9`quJomO0nwH=-L$dRZl88r}6STKD?J4`})K$-$j+S&3-O}w&P%%+9UV_ zgqxwLq;V-PkVkMdYWDeMzyF(-vFEi4iGfELMTx^k&8(sFljOT1`!gaJ7v!ckq7;OR z)Yx6TnW=9}+Pt+2(dcB}od}1&&QNQnl(09OwH1MTMu{E&&0zaA+!|b3k%?k&?&=Tx zWc-Yw4w^a&XjP)Dr2k#9|k6c$U4T^CI7j{{d3>M4+0=3>fThviM_8l zp~x>pSInJ=Y?Hzwll0_9r0(k#!pz|-MKZT6fiw?mBg&kU{XWHeEOcc&>%59c+pj9y znY$t3FJBTV#@ZA?<>9liPphly$KQvrd14>?U1^G}?5T;>8(V-mS=~*A^m!lZx;Su^ za_Cx&QvQu{oEehvU96*8I>`)IzS`Y)6uSQ-W*(hWtK^$X7;{b5kCF-nwOLC^{9we7 zX8n0e$}{YO%s%R)oM0Zhf*g5Be7UNKQq@)FZhe;MEH*N&jDrmEblhu|FU)HCixK~g z9sj;OBZ=pSpk;b)$_9OUt1Pvcbsh9<3mLK&V^X8-Rvrwgt-Tt&<%WJ+^(nF}ysVJf zAQsoS#EGo*ie&2$RLQH1J`3yAseC70h3lZlVAx}`seUaH`L&W3uK&ZZI7C7quD6EW zIGVH*uaNYY1TEz;Qi>cyJ(y>#>wE0yF*Aojhk64kxxN_6+K6b$$X$yQnd^!(WF@4E zyR-orzd}pztW`+%wh-M&kNIiaKWQmotAO@QEPssSUQ9r4hBb&o6P3WFH==pXg52T; zgH5HR)D1n8m9^jrXxIZJ@sU?<=SxEu#tsGD7j+>ep+XIFQyA~riqsLBl1f@$hTi=8 zp{sV26{TcaXJ|-F--prX8nvp1C;7|2#2fIs20Z4VT6!L}s!b*jYvW zZL%v>SV%^0_DNPrMS1b@=$(>c;l@ebDWb+@n&F~-U7Lo8QtcWx)|I9erop{zK#=fd z%iFNdDyGcI9ZuTiAaYAd{Wop1$BHj|e}UhoJ&-&wj`#E%VZx6`qrw+~O1~&GtIF2P4olDHEk4R8JtP+ijKYKc43X1IQ6_ED_$3XM5bH<{6VO=taWu{+t zSIkH%aB!obXOhMjdq_T=Df5?Y)z26q6QVf^Emb{(k z(jr@(FwFN<6FVvU*Or=xX&q_YpuFiG%?$srG4@MNFncrSe%!0p^ymv5t9ByH_D@#)ThRkBjl;FKB{CMv62BQ^f}!YvXj{{)WUK^t7wcAsX3}T zJg4}Pab&Shtv&E7g2GBImzb!0?RT~h|m20__#uL z?`bOG;Iuf(CfH=e1v` zR(ZVn<)|I@CHHB>f_u0XaqssD0Z4ki>L@v@3(mb-jR6n!ks;y6nOH>GqQc}X=N&=n zpYAO~K!q$hRbD)eGdb)Dz|*@p>H$v3#2E6>1AywjP_v2eyC`=ybl{H9eMi-D01I4| z*#r?joB4MD+XJh9k}|#PSYo)^8OY{dj6)rjKK>j zkZ582ZEIa4srQ!Tr<;qSm0n{mpzp_zh~NBR3EI0Wvlm3J8!$Vwb(L4r#gSsE$;lxA zdk8_CwktCLZqzK9?-}U?XeI#OG^ggv)^q^madCIOe=w>b^L^Deftdy1iE)~>VF!@o z!k@Qz@b~JsQHnKc#`!u`doGYv+AveD{cyGE`SLjq0U|8_6oLKu0NsP%nAW$a6aM{7 zgZ%(YqL7znzO>*8P+Q58b_|b%L&C&_>TS0%TJXLEU>QpGx~p4lZT;{cElLI&I|btM 
zujJKEJOio&xyavqllnLp)L}95S=fhBHcSGIy!3`Sf*(YF06+@$u1*S!z_b#BoW?9A z4SS-quG=71oBAA8mU?_CjjzYB{>&~)oi)YOny5PSn{cFKL<>USC&X{%_vu-@Id z6`<#7Pib#89uMG_J3K9ElqU_u%~S1BH7r_|gCE-%Hui(UsjRsZIsa~nnR1_(>5Jm3m}k6+JuhG+lCz_z{MW-&^!}*C-T+A=t_UfKRqbMK`W3*p+O`@OhCQ6blxdri9QNMO^?c6)Za+fo ztPLJlbHl2%vi4=FZI3C3xNu>;z}e;q0{enH9b^55-lbp@N;gfnsCZG?aCAh3x7%Kw zO7riUP5@jO1YGzoMTimkBJzO%Y9HK1>zAD&uj$MXonEmUI;bfg=6;-GD6QYZdkKJZ zUdy!khi12ENTr_2m;!0$&adcHNj63Kh;fR244SpFaAQwwUNlR6*_1V(Y83f$pO#yz zUg^NI@Qu)ITx1LN#UH}TVj2rvyVt8lq=rD5eg>aZ{wt)Fj6u2M6Q1GhOeS`As?6s+ zCqqIuBcPgBDvO4_cdfE&Q5=ptL`aY`C!llEiv|5q@t4?@;KiNEa!iyD`Ei>vYJ~@5 zPW)agL!((@GRI`zr}}=)3c%aEci9R8h-8{fer1>IvN|82y(Dyv?5@1qbU0|O1u&8w zJgh47BZjY-^~gsp<|L-)0aT>ne9YzXx<-xfw-2Ze#$$@(PKxkXzNow*u?!kZd1-6$ zPS!u>Fb(@aAh!?eMWuJB-_S*`$Yg;&rmJ?&c`mrctn)ejtt^T+VRy9$>uFx`bUf;O z@8AaBlr^Tt<=hU{|EH5gz6YEMdMMCqL$e{g;)>xIS05XP%o|ULAKuT?6zCg8+FT!K z!mS231Aqa?VDn*tL!(?4DIqr$pZ#}ns8gY8LdlhKk^BM~{FRiNY6(|@sAVC>AfTejtlU0s>3%kq$FW;A zlJR3uBIguc5}(-XCIcOTwox!@K+AVKgvf(2GBPq`tj=noS@dMVai-rNQomLB{W+*d z>zJiOUbGi#J|)qe{YB%yy#U5DQ}yYWTp_TxKaiMyc-`4S)#3`rJ4U4O4Ly@hD9x0z z>g)?Q+!y^m=B8HXY^$d^FVVD}0&vk04bdH70*1Tu*4dXA#`BFXogeN1x<~vyfJtCX zHZ2(OP`e2w_I$uN13BMFS}gQKo2oVNxXb_q-s$!Y>02%4Jqfkr>$k;+`S2H0##FC^ zw*k&!kld|Q<6-Bv_gH3X+$!N1>ofqzT)&sjqmIX#{Sn|a>lDv{ZT9J}L*Q8x)EedB3q zPQa9|qmiqz;}oH1M?lKZ*x1|x%7~1L1JtL7q(7BsX0L9Y@xELnn9#!-8!qEY-i4|0 zK+g;~4_D?{SX+=IKin5^SK1y7vx_J(WPsoLPB#NkQ&IoHB=(7xv`yEqO7eE%@V#Kc zg3%ugZS-+E{hYqppDR8_QMQUL$n~CVj*z7(ZD-@j<^VLK*NG~jnAzP?Ew-&^fvrEY zN5pep!Yc#BXF<`gZ$)vDus#)ux?CLK-@a=;_;K7!sSb4mU>ILYZALr3OlA|=PSjD3 zNo;+mRwG(fKu!QF_?`gNcUNu!O1;I?w(uaJr$dd|o_iV3!FP|#qi4$-O02F*TH&SY z%_Ie{3m9LT%pk(@H9Z5>`MM96*4Jj>NxlZSK|8gm6E zDI4)hHlUXRVQ-VS^f41}XYz~3M%8dVSwOq^ z)1GD%CA(BP>h4-tf%DJC=*k!`sB`LH^4h(*!*aOvi>DC%i`bod5f(0!!X2L3?Rhku zH&C3}19->VGDiW(4O8qLie(v!l|~FjnspUi%=2su#ajSGt`F!H--bXb8Q0muF59Lb zuxUf};jlz5%ZMCp%p*ltJ~N$y`ez3{@0e|`Y=a{qUs~(iIhDf1ToO~4tlm*KcpeD= zS$FIW4p%7+##!B$E7}w4j=<1Y+ie8tnVc zS{`W95_#Ej+4_bE$y 
z-J6RrGF;vhi<7d@Iz-Pn^8^;7vd}_=6@1HAYx}38B515~X+WE;Z0ori0`suexZ&RD zt#@$C{vHflM{NVBmw%BGNi;w4HtE(8>w5#6q{R;dI-r+wzy(T44!meJ9pT|2q_1T@P?IEXcAZgh+SiMo7FKUhQxzMKK(WkhYhHsL7Juw4w6F zry>;vKl8ecnLsK87db3sCU(;1M2cNH&N;9XU%<600wtst&D#VtwFnC5;f0RMt<$D? zDRQ4iW&!=Wq?|5X9I=z}&8Ex&FC~M2xl6@gtfjad}yikd^AeUEtvq0dwv3SzcNajGeR~A72B{w~l0G;ui(%4fWye(NUqtF+9?d=Eb<$@C2$!J(QyCIEwa8O(q#yWyt#>hcx+sg8G6j+k|KSVn)6rKG3BAvW z5Ay4lLxrpknvxy&hug+iOO6K%2$+}lx?YA7Tz*ECE^YD5M?G2VOR%V!wT&vx81`^% z^RGM9;vX$ivAUmY55(MQMkYiSL8SS?jyN#-TA{`Bsr!BncJCe-~G-M^)PFdykMrB==6g^X2^|ARxzK zDsr-~W?pK%_SszqTJIbPq=Fp-P>Jh59_#3+`tS+-E_5UF%|3T)7$~EZp1sg|OYf|d zUP2pq_5pCHJSW5Ax8R@tzY*HM*6@l;-~}?lJ!ncQopevX6F7KYRW0 zbJ!YJ7}OJf&vgCHXXgzbBzjGCL`ka9;7OKryX~}ww^6sxMo82h-sySq7dCj9?}SON=Qwtq{zoitI}n9BNY@3PeHH zP{ku^laO5#>Zu!S7EQ|}I^ci08RXCkxsPOwirhtFSazKS@)@~92snFCcuwAWp$iIZ ze56X*NzxKS$Ve>x>&aU3&1rC90JTVWhBrynXq=Zq27aPGg{_Y-vrJL150?n~9!VkT zJ~9I&er7(y_|A%aQMyCt*b)x;^5hTW8UIHt5;TTDGd~wWfB{LO?0e-PuY%9IWp4`W z92x#BRl_Q6*~(vxcR3=TbL(;pLCk3n2=Mr0p93U%-9ZM-516dHf06Q&sH=}-9S8n3 z&wsxd1OkUnY_T}b(?3uB5B(A~18~UyU-RLzTTh39)({#A?{YkuC3??(xW{6^>|#!{ zi~sA@qlLZYiCZCB4FDT`sVas=0u>xv(&w`myZHa6N|VUsuZ!eR&|NcK?J-o&kw8|V z16RAA(z4S+`E_r)k}O~iiv|6H_Lih5G@5VH%r!V-5&l^j5=l4-50ZLItrD(9?H!rM zB4vmvCufdlM1{eK9y412zvZ5~Mx{~A0u_)1tl>2BHvJDP`JdGeS0tg!WJ|FwRfo>r z%hb0U{BO1Y`fjoqHs_9LxWDjc`y-A*K_s-V{*?M~|K1?roU;eG6-S2kH6INR0Xvt` z{R-xvr~YeGNeE$JhwV>Z{Ux0LY%@Pe6|ns#??fLxi?|p-e@MvFDe}L+{PW#&27oLv zF$Dczg7W7df1#70fyBaW{=f3rSf(WPjO-IVO2q`OxsMkzeFZ7n>2SV&7MfvPCH{1n zwMa$XFGxvn5R2?TRM`Jjt!Q`epqRqMrRK5%;O~^^V&jeIsqzFTFG>0S&20$i{0kgv zmF7&PV$C_Mb3c#>-q$sNQD=L6RecQ4p;#*6o$h+*&A%l5&pZ^0=@O^YC2D(k3bVLu z*rp_f32No$ZGlod5gV|O&Mc4nSJS6 zi42Xoq-E%ZTM&IF)^s>>G)1;7Ja7JvxDXkl2#rM&?ri|ic$u#T<-|4^K_NQ(l%PMx z>2E2R%!Kb*i?-`+S!{gUKhkz8z!@m4kSf~qnoMEFAY?^HxN{X5mO_i45OZ@}lJ%z& zf(Tt41FgMOUQ=^d6zo8ZB<>Y|Rbz0X*;t7sCpN9>R0XHyG(19?2(P}>Ka>0|o+JgI zE$b8wm!_db9#7Qc*J_6`X=3?V;y;y4qcYu7U3AesT-Wb7)Uqq*UoE|J>EAv44L>ZP z4R9PU|Mwby&Ct~dXu}$a2K~P;_n&#afnivJXO9_0wE-nL&W-;1-wpqJ|HJu#VHgzO 
zM@sUJ3E&@fQIL2@afc>O0t`P15O}n`3Ag~aX(J8AzwE~!+DdXXK+`_AkvvlQKEOa2 z%|~NAQa}vgB%u7Ri}vW<*8uX>_f`^tkEVP9m=nz3Z=XMU_hZ1EB(UcQKbjH_b`|4g z??rlY@{w}09ySnI`K&7De=_V{t2K@8zObi8d-OSWuzv)4FLA8eaTlyn39ef$(TY-D z?7;XMOQX<9s{}FJo+zzcmBgJ+9HX1NJ~i#@qg#~j~Yh?*;OvdYPI`*pZv&$ARXhuVZ&W zS)!r4`FlL>)k#*3tu4>esN4@-I-d@^YFHTFyKczwrD*ssyrQ{J^SUd?rc<8uxjde| zDV1iMdHHZZBpUgZ;W9&$SPr2;brk@~``OuiV5Q^hEFK^PIVG|vUN3Jl-;AViT(_u+ zp2U_f(R48@U;gHKSEyWslQ6is;He)_y)X#4|AyP+kYM-0ze2`;_9eXv(LjV`y5^o= zp;D7ogQq}->H0KI=jQ6w?ST;6ud#P~3(ZXqyYq8yYcV5b@t`*qhZ%%vW14!bbIu(nOJsEF(V(6y?C+Zrk;6QXUI(Yn!|b9~4TuqJ9-Tsg9^C>{&k}bbozi_uU&oZ9l7F zsyN|2*}~zof41b$nM=*+Ex%x92}nIe?RagT?)gpTq387}uF5e-=e-JAVfJr1fko8! zMw6cvq$?yDIR?kR44guT4Ikk3M z?#EApS>UpbPMeqEyZdDPa>pv~ly3}&LDOVrzs$GvlHh(gz)^2spLC@D>PQkYgPuZ~ zroZqd!E0<%*$(6}$hpkocF`KqoPV_1s}Yv^vlFM+_`%VXt_l~_EL!yyH_P=DPr2G$ zn;RnS5t(CAGc$0avso@ibkwx9bz9N^t@bPdh@e!m)N-Z3W3_G#Yqb(O?pFmMuq=p6aBMD#nt&q6(tGgy(x}IZ4HETBPV^fcz5fH?EcLua0Gizpm9@QawBhlZ4V=!OD#> zmN5M?Oky((0{W&~t6)xz5E0+zL+l1`UjEuNPO->KajD+UHi0;7|LVTGSw8<14z%c- z9JN;|h;S>-UiTYFdH=IT=0t2H{=Hi@s^^&GzSq3FN7N4A-P^WmJ7}1|bcShV7f-Q9 zof{uEcd2SQ_h?gMLgM|WROg3rMUY1N=mHsSSs&WdDp2yq)4u(dYsO#4#b9PS3zVRy;i6unAGawx&+SDx!7Rj$oZPfz*nETaC* zOB!lJ+dHJhXA2LH0udV7m#mp0oYyXf_viiW=Gzu{lYViUTW&2%sI|kJ=L?>dNtOg# zE%Kr3T28Y@0FDsiz*$kWySev5$vWTIX-gZ#vqUk*aYR zaY}i2v4QO!J54gvzfZj<-g;IGwv z)$TMDs933RUl8+>MYbqGlJcc&8gA2DFb3c0_pUzlTEc_=n?X|O#_Nt^WzCGBu(iV4 z$iesmPVNZC6$?|}I8QXHAYmEXWYB5mUcKxZy~*Pic=ym1Fa^#rV-Zulw;vvy2A}zv z0mlW0j4kR>r16#v&5-VIx8$E7hgunMTl7|q^d31maGB;$9}RWT%C2~u9P(7NI|9FXiVhw`=z+H+hQN=-e zcs3LOULJ)*|6830&&26L1jtQ{3wv?;`Iiii1nMZCWuDb_DZb^ z%QYKu?1N~k+TQyaNZpSz-PH2`Pr3YCsQ$l}-~#kd%W8Whf$wnwt?S(%A*t*O?)d8} z^?wFHTmguNK>VT)y>GPVR6sN|5Ip-p)tKM4ph zD=aXt2_Fq71M1DeWc%=}d!9`LL_?G$S&xRlU;@$5x`^ciSL=y43y?|}fitjwG;9r+ zh{eXP&?GG4XTAw}-=M(!i@H&pSWy0!Mo&i~^zi!rFL_*sf-RbM@NfBQenf^FdbXjm3_ z$d9$)i30r@R&x`>p6HK}QLT^mu;6*LB!$Aq!4aqLBtVd0XuyWcsQM|2%EK^uO%hN`ocRN4Y2Zahx~ z-E9>O8It2I4z;1wK+WvPyf^mTc628+*+)hBj>2!(Lnkmpd)_{H*K|cBAg;qJ_AW_5 
zsRX*=t4#rU8=ELf$Kjl~ne_Jq6*-b|z=anthJoa!!@6?Ah5PxuL)`7{xeiTelyq*7 zQlWYXTwu2bjE0q z2V%S0!*agP0#92LP zGv0fvvSs%p(9t>oxWEOqnXe7;OT*`&HQxcPZD?lpe`ZEZc# z@!S+4jz0ge=S&BLX3KncUE0ojoEmkuBH6VB=S{gY)pOds)@iG`Q~_4asW{zP?ALo< zpB*?(-kL$TOLDo@OPF}?>2-)e=UtX_=KCIJP``moO!lU7rsi-aRO0`gYJG*urG`p{ zW7Ww=F&hD2kTb>MMkQFB`Xw})?K2oOD)Pm1BP=u2UVjzM3VD5-Kj(S=e8!<>{OmA* z_+t9kV$HlIq2|cs3mtb$L_~yC&s?NCl*;$dPL9u`2h#W!luOh%gVk!Sy-VfB0swb) z+BYnudK$WY@!JaU%^bn^Fudu1;(3K9nmJ%4;#yDwH>WFGaZb^$BEL$txpedXsqSTk z;h(b2i+${qfh6`pAnVoek=G+*_0?^=LaxhA7sDV57YkHlueKLFUT8STcWiHP#mTvP z!_s2iI|-ye8ZIj6%+GnICv$L9_n#`qIIM=7?EcEh$Ducb#chiLGX+!x zR_~kdrPX%2vg#8WJ@0o_l7M=v@w;?ih*`}Z+e#swdl`g#hGlYGl<%@iDPOjg_tv1X zfbH@?I%V#}=?Dhu2#4{HGVyDpsY6A1>{9$D6S0FWv@CDzc#F}*#nOv|;q5cptjtQ% zBVUCWd60ZKUQfIE@Dhl%p3sW_324c@M}ch6^s)@kwy%T(k^L>J%GA&zdlLB4AGL{B zeg-I7l)QGGX2HKbpuTQ}x0DbnA>J}$Nvi*}|C8ABV8QP;%S})zINwc6$7>ba?QD|@ zi9YifT}&NbCChEG&Ud$AbK%Mzh}>dPVY9@*GbJg69tODA{qW%u6DWjhJ_GaP<#xE& z09k-O5Idccj;6zLy@j_bV6>wUh@*ZErH(81^&;s=2>CH z2H7pG8*SQnn4ZVrF<4N_WUToLB=HQ0|9h^#1k_S8rWS9YGz++UkrdVFD8pRWofhp0 zQ!j#Rmp3JuXP_jLSZLlQ?as`@VrbFDL7OzESEP6kH${4BI~mK?LR&8dip{gGDHuyG zG56l2>>(GkkQ$P=TqiOf&pMAM`vzeLjgIGmeGD*-8V#i@<=dS{+w5s`cn~y+bf`{` zlPE&Y!btAr^mY8F16Jt4>nDlw{?}Oy0hgVL04~@8CNwS%F{8 zF8Y67!JqZ#t;5^W9#H8jE|tL-+GS&rt71Kb!5{u+*FBC(y}aP()xE(IB_ z>HO$)>!x0r$+#!HrBHelK=Ul!YTF?#DX5x&+}QTndq5*&qu7;>B=Bt zEx$%>l%ci@uqaOjPhU5+as}1XQ`dVct>nFDs2|F6&uOvwoJPf9rHvd9s^ApoN|^Sm zI9L4KE6Ue?Z-0oxAM|FDAuwo)_Ha;1A8Ih=zy6DnMr4!Rj7#%vr)@_m-pE1w8~0gx zAyxi+^OC_(Dx`7MZ-Svdz6>A_VP6~_H zn#VitY2Ogs!0xI1Lxx)n?#qjdv-PwwA46@)Xn9N4;l-@^5=Gix@cKOy+~{&@F94%# zQnCRwix2EvPf_%vmV}g}?90IC&4)hRxGMmQYunscK zbQ~9l6wD9aUhzCE zo{I8qr2$rVd;H&#IXs@8Q%WIEz1hd;qh7BspXvhIkWnDGQwOnvvOnAzD6(|+XFOl- zYfh}1@g<)Ln_>H}R(9jWGq#V*`a&AZeh#HrwJlG%kN6a+K(N20v8TU`Hgb|7RTh{QUCWX6w z+YgBFF6E;8-+(j;v&?3A%CWijy6NCGN!)g&xmH7Yn<-0OT8-MbpE>pb5fbTw560^n z<9gBX&8$$?@fjo0u7q7+ajH7lts+NR^&Dg9RL0==drxl2y3xB7*B*Cq80*$Ly+rhN zbGiIH$t9~`kdB(N;J(Za2oCDc7T=-M#>xO9awA`#VX;|{msQp=OG1J%7uc@%X2Jr0 
zjP%+68i|ir{zV0%d~OkkPeOdoDyS&99yHr;TC?BP;&2*tvn>nslJw8vD&^y#Ro|M9 zLKY3=>G#RUc-jakqB;`5l5Jj=H^i*!WL}WbM08UjQA;8k%#5B+HAVAdT6+h~-DY^G zrQa3LN&dpl=*W(4+q4YlePJ2Gui*#LMyTJ(i&rEt%lE2uQou(g!b%*QR$eh>Inwr> zE}(ZU?V0QS=izg0#%qd}(d@QADgRz0iszUl9D3VKM9+G!CB+yHc;7*V)AB9ztA?`L zg2UD}1He_rcIvL8Dc74gA+2e`#u)oRS~O*-CBvf|hI_3uVZ&#bqG~e9hdi$&>p8@0 zWQQh~rhgapMQ$PoR9T`ZR*X?bL3$DSW*t6^nTsCA!ATBQ#QiV6{o3)+H-@V@^J4mD z9B%M__#d3~`l&Xo+8k=3FYsu;rOLq2xq65WRx~^(HK%VC5!2eA-XK-Eu7KDN zFfkCUdPU$=pqrROR`54rb^O|#=2-brvs7ttH&%mBrUg9Sx(Sh5Ug=$t?O6 zs=q_C)hs6aLj=d)aIy47hPDJT`}TN99pTRKhA10o_te`YnR-Ek!IZ9r*XMMU+T(?_v%I? z_-4vD*vo0OIw7^fiI{(jVE&cEY`roZSS+8r>xetdCUH$;2oHC|)}WOb9sK zT+rTHFY}h=l1RsphPGs=OAARkoIR#33T|osN7=Ze`9S}pa8a3-szupiWqb2lXp$Gn z_uaS`RlBn5c*ud-xHi*VcmW$gENnMf6{n2pczI;CbbS6>wf`uP&YxX1A<^&kbC`GX zIYYE1*n4L(#mB-8u~Q=8{})<}U+b`K!k?}z`%M?)H**T)R1=`1Du0;B@@RK_tY&*< z)qA1>kfMqFdYNxweIt{HXEj*QA9c)_5N5vmm3}Yi%f*)y{jVj_2^P@QasLGo*zdFc zeu1}@;#1{ohrg&nOEVGI<_?5w@I3brW7xy>nx9LXxe1 zt2BJ2nS|Lq?tL>DVc03@wKlQ(1iL%`4MwXl9kyB>QDq^ob4iYMiehI^VBtf%mGl7W z9>Q0;HFh>!9_F?Rin^Mt?O);WwDQ)89vAb>J1qaxi5@$leyTs}i88w!N>v<4;V>y_ zuuoe9=YE|D#=IiHin|o3H?FQ&@4{QS&F872dm96zbbCIKpUmE3FxsRLR+?tt8RqBR zzUn=kt8XXe)Zfh*gU=a4uGe4merhA#nZR0K$}?VDo2zp^>Xd(>`MMJ@@%kcHhsSl5 zDIEh|DIWD3V=)8eOJvd80bRRxrM;;>F0q2IjYiN`mL&L&KCIEpbkxF>g#E!B=+p1N zAJjt%v2aYV_Wedc?&ga;EB(n+yyfM?Cr{5U2pRRrAWH4cGN*bx@dr^P-X+@eEXq<3it-FaascmDNa=$=5MpXfG zP;3+&9L{AWJ}SulKCo}+YP*TZ8oJ0mVUnEn21!41gG?vx?b{aV;e z^W%hLMq0b@U#E~GYuC=IDDLlUKluy&3B6C7YK(%c<@!2L96LHjin+NKmNb3`{CO$>^jcPGHUUqO{(7rdDPpD3%p{YIPW*8 zB{?X6CFjV*yk=oQEG*Upe`F$s=p6gC;N!NFmoP1DWxrtQgCYRwC_c|t+SYa zaVWv)=$7lt`tOyt?5ZC=Q^)P1Q63qWW$S~S+12?uv%kMkx_huZBp1Baa?Ne{>N}=4 z56iz3-W>nW*60t!UiHT5A*N=_fJ(yPAzQJI zM_cX9IuD4gJ$P#$K`Hau?R#qh`V5Ud6=*f+(q)d+6+%^!Dq#MNrK)xRQbDw;DVChQ@rJJ2BG|xXiO_Fx;u& z{5b+Kx9g`#{Tb7>*HQv#hX7(hag2pn1)>sS(?ydN+W`2IXlAdJ!x6e zHQ!1}OWF>>oJ7obKCsRoXy$C6!1*4Ci(x%J7J3l^gXNGRM$x`Mjd`B}r{W~ZS9h$B+c2JeRzS+V)ey~dT>P_$^cMG;Vzg~;K_Hq*JO8jSl<#mq$Zk}@_@w7)lWB?)Lh+PNQE4L 
zvX&+Z#Wucai;7mhoBKrtXmWp|T2RB$ZYn}e6|CJR%{$*{W0P7cZH)l7+-P~@?aAS@ z$j*AXHOa}O9sv+;hKIYX*Zb$?ac4KJ&165Ln2EB})>FPsxaOnv!a)&%+ydP88WlEq zGE=IxoqhULn}kd6nDH}LhsLiMLC^o3s0k?`Y`^|W{&lUtX!6%9`aw#eH~nQxgzfLY z^A^lM{_7NME*dT<2HE$R8^a-;A;YC}f)SZ&#j~*@_Ra>2dbhJrSn_*)%`_t{A|403 zds>LtblXbq73En=63i-`hYu5Z{8_cDD%-6qwT8Qu{Gmx`V@sS~b+ar-sXuK0R%@%i^jAzCg1?~~T% zIR(b&L^@#UHujDOg_9N}{$Xg$YG!M*y$sMAe2#o*!jLgq6VexX)jbX1fhcmvyyW%< zG9b&|p42S^&cd&dpwcZrp}+kp<9sV$)Ko{|3)au0uslRj{GmoZ@OW)yeOPahEP2RE zAu#-k2xPH)S}-d4&I}->e$L1ZeSI@|UX!G%y*cuQf zx_LVL&{ADo3!Za7@g5AMUA5>#^{KV{LtL}1`ZIJP*LoWQ=rrk3H$2Ufy^lfK{I+`j z4j@Y^H`IC1GybN&;3mPOc_AA}4ucUSm0f=S3>}!<&4Z1kABT-Np0xCyHeNyJf3WrQ zeJ@yS*Unw&xgh#QKS9iTZIA%uQvJ2>>RF#8=^S4bGx_GY|LSTL-5!vud)<60(gw$_ zR+_H>J1x;R^~(q#s{N}+8%p#q?Lw>%;W?O^FUol>@GunC#Fj9;l*>|D@2^`KNT)u) z0wJ)H5 zx8r52=<|I~KRt@##714IO=sqNwXKj^#0;?E9{Rog%2}~&fg#T;{AVxu%7%R6gP`&p zS`sq+JKePSNps!r`-i4Dc#3ByTveERHV6rFSKci zgw1|Y+kbNasLMSl46~|jBd}3Oq4S_C?B_)_S08?ZNbUe10V;6B!3PhLM!yB3up_Mp zrm&XYTX3jhp9-%exLZ0$aqeP%X_JLb3X0WZ>OBNzX+Rg$ z3Q)jKq`f^bgyjn!jTZ`x`(Uz0_{Q&wVG02R)*9+F7neZ0OH(h8l&DLG zJe2$fWJQ)hsvuKHyp%ip>G*?v`PK6KhX+qfZ?4bp*qW3k-nXwb&-EXxjy!L>mUI^itx4eA5rIrG% z2m?9+7te*x(3DSfi|$HL?DnvI8W=MH0}@h`<23_%Q0c4JeUD-lhs9MAQ2ZUV1Lk5t zIGQXP?O-*V5 zJ#AU<_nyRdS0=L7-nC|a1pGE0>(t=2CG9!i@heM07bs94Cj&;x9c_dyD6}3Dzwe$E zVjZFxf5)Bb*D&gk^_-GpikT2~C|a(1=8p&TV>PrR+hlRO(!%j>SsQ5TbGm*4{rzTS zDs01w`W%RJKfIUWMH+`R{9vej-B_Yb5?E2V&BE>MyCX9qt!Rz&t=Jw}{ARNSO#7wC z)|A8b;K(m!_H^$3RlWSP+=e79<+fEftl&k1ujw!tws=*-zSF|{ zT4dl`@d=4tt}ianBnlU^4walk7AHlBv497}^JE48+DQS)$N*43*!Ezj&Wb&A+X2KB zUzKiyYC99z1zvkhZ{`O)Q(RpPIKXQtvN6e`N@P)5TE;{;Fasi=It>_}Tv@Kpb3D}! 
zVdZ`>sTC=w%!LJM@_BF=JACf)zh!sXUT=8^ba>I4qqjh7DuoG;dbULQ?XTf&Bk zw0Y2a2x%|RCY*dWTV>n4`=0R(+YVRy3v(y5m9wxp7Q8O!tSR}tCfqdLqDiO{y4gHg zsili6pvt36GE)JTwv18FGebUfu>CCx?^r0SK($#o%gW~qA@jlA{yvQ##%-yf?uj(N ze2(=L?+NZ2UH2~e(fu3`N#}ubApW_N5f1nmyvRDw{RC6}TZn!#{n+keX4E}NW~ON5 z$SM#BNuEovu8tv8)bD2nSj6@FEe36iZ7E&`&U_|xWTtO|_8b$}3PWA3`X?keIpy{& zD6c0A$JCcuju@x=4XJ|px=!c^E<=Q)*=I`cPLS?*%CwWA+w5Y6R=8&R6EzmYzw!}( z#g*52I09N##iahIgNix`4DI)MgNM%{^PTp-t#HGo2U0oU}Ye=Ag0kPKS6ym2@U>X(HoI+?G>klP^K2LYhkb z&+M6D0#TaUqd`uLlwteHU$&PZEH4tS8{{D}&oL|gRWt|+9P{ZB?2&6)V+kcw7nhc3 zz?*o5T`3J8H649?TJWkdBs%`FdMB`A&xnmVSobm{4ndx@+*)k}>j2$_H>_7ZxB8r> zC7<20Pd7d=5OfEf{oaX?sV!MY(pRh5#8-_~)JWO>^4kBP#uT=#in|AcD!u-E8G8Hs zg-AxuqV%b>z^#CfLA2cBi3Vtp>1uG*WI(Q@%TPD;xH5Gpq6l7pLgrRsS}`@fQjy^s zu%5&yP7kb}=&>^U0Hk#O9I1rZ17hXfh8*NC;lB)f86HCLfrm%Qim=Cbq>ZgT#)FNEZ(4R&tHNYS0;N9cjV zAldAJwHt2&yeo<_6aIXPl?X|-LGULESF!_soAuLnu4V$CCGYq+<|Kp7!75XLcbt4r zB~RDgm3y~_-E>N)UkqSHFFDB$DzM+=#V!|fQ?2XtUOxnT!(d` z?UXlk(P7Z|2R>3Pym-aA(0Sz91PeHB(;JI|o`@|Ra%Wl|J zSWeC&ld9JvHYpmAB`)Q5Bo`=-I;K8~v6?H0(V-1ep~*A+qhzl*`g``&s5{C4x8bn$ zGnqZxc2xOAf|BH3nxFVYS{`5?l>6nOUZ0kfZX0Ws3f2F!fRFL>{l!ccs-w_%Bj-1p zGm0L^-GLzK;un0AE~Wyh8q2N$Q{RqI-Wi~)jzRlGj$UR*(nn0Zu0LeI&jg0u9E~(~-CdR( zlb>MUoAX^rp(m|i;J8Mwqm{WU4iNjaBoEEUz#J*OQQRu(XfXErRI${Ma zdBR=@?dVzmm0%sfVfi%>^G0E!M54#7K=pjtBm*n)-X`F=75er0((hukOP{W~>-GXm z+KZu?m!*?mZ5u1Dy%?ZU*YOLL*rK(%83x^Sh^dl8cLsk?$@RN^?hCB~X=fDX5ylYG z?w}=?pf28qGO5^0d2w+}A=V*ahTAZPc6v7Xt;nMC8qzr4us$Q7jXCf-{M^fz`uA_9 zL$pUN)U}?uKTvJ>9Y9?JJvgh^XOeO`4{r9^ta7X;#lfG)Z%5H0Q%!L>P|qt)zad)3 zH7IL7lrrX-!1BoJy|H$Czib{-Uj=57=3fwcU1}nWBuXK11ItE8bzal_KL%orB2| zrh!9K9&FpwYySA3lxfgkG=e0{j10StVy-bUJu?1~3eVox>L0J(a+EjndTX(>g<5g! 
z%ej@3X7p#3dDy2LZ%|^O9fT}nP$yyF+x;TJo4;}BrQyXMo5Mij%POae@O-7Z+odT@ zTmn7btYhl7z@F6a3a*|!BJ7H=UeWY?97(_0|s#QEz`U~9!C{R%N=T|K1448{V zU*DcGI)?H$C#=VnM>zYfQj~g|z9v}nkQANpCf~DPMKqLX3B-@qT)CmdLTK>Wv9FYepqTaBVyZ$vd11gj&I(kfh#A+H2s-EnQO0uQ`L71t?OrY*LPC7(1W z4X?axEvq(U2AgbrCDDN zoI95dX0E%N7`-AbFud`c<41uvO-lN8Rn($|3d)}vtkvN7`4Tr*kX&ddJM`WHK!qs= z*37QKZW52LW$by;H|S4yxAPBM15U=;YihM8h!%@Km@D+lb~2F?a9(~_`P2AWM#8hv z_{w{KD!<0dwy;9$hB#ziUX9Ub=*!$=lGPwh!rLpgh|u5JNXc!UzpkB%EP+cDU1+Wi zyj%OqT#-pBruGZpx_86UM7^Vp_h5%}4z|>1(9(O4^%3`;Sk3ICz~8ylEFq)YZhUGA zf1%_fQc_P;@j?#25U?#K?I?Jt&cSir*d66;)&Za|7#P7K+5oe?@h315#EJ*Wvra8c zMKugw=FVet+GQ_Kc@75^tba#210H52 zh0`scY?)_P$oEUROTv>|0cWxXVIpEZ3Rm6I25|9AK3?bhAIoMB zd6l{hZ^eSRqk;M8=un^82d3M^(4T_j<;ec%_Dk|s8LOu360t~Wx3#FV0RFx!N~5yu zh?w-(s=n0~Qs2&*3#S!^@<_vQk3r`$wjbl8EX58)&hwlz8G^_YZ8ZK>LWAedluZ~5 z?`P+u25{50udAtRSnL%QmV*pvXXdLp24I3NJx*&pHOYXf<+kZg?}IG&3l>F1So{Bl zN|yk?PqL0kSdr$fT@hKLmEMAj+(jGZL*|4RQfyq++q+7yM7;^11B{sRInDWJ#RN8D z?4bGd$o#9vylDO~xBC6s2(GGSW37fg(gWdo6IidWL9|CeN%r7QSL zPw0dwc7^loh25I>Sd2d5lZ1uvHUP+kLHpCiV+9sfo>ZJYINle?@LhUBW(;VaNGmT+R4k(HNNHc|z3R_k-QMQHTAdPPZXG%nw~nYzY(v9)N< zP=Kw+^zaY(wv?ydfOQJqRg?+U>uV@hudq_P)Mis$0uUH4JGsp-0Oq?;#ZXxkO9|(z?IK0e8$9>%eY$=A7nc;FzO2^`< z8$%O-`5^P;bnF@gakRg{B&0A+nXI1oLccX343(n{{fC)8809$CW>9k9G2FU(yX%K5 z73IQ`^z;MohIa8)h6@_N#EtC7QrwReW*2R`>)_jdtfNRSq=oY}uf!YD+@7(}a$BhAK|biT5lh;20gVS^xf{rF8M6sJ zk?+XRcdkun(U{<=E1!+A#(?}b>Or~!$p8YeMXA&UoEXyeX_O@&=i%~ejUOO&i$320 zM+P0So`-OhVew)Xb%V|J@#K0D;(4aIlS4`eGHwuk^D59Ms|k&|>!}V6F8=8WditJ} z)0_~AcQ-Y?os|%HW}(IYO*P5s0|F5or4X-{kWcB^&Y39-5Ic7e5y34@>;ilhA|(uo zG9!wwTo-eBi4b{T1!Glo%Vm+6PfTF3?)NDwjgo~wvM3XQgQuD0#wo4o;S8ShvtE~~ z0I1`poK%*$$T;I+w=&BJpq)(XDjInDUcHA}?RSpKrI+ZKkmG*&>SeFjdPup%K@r3B zerZCXVQ!?G0$@t&MxB@fsw%y9&?XF|w~}80+$shKyLX`6RsASKY3uZBZX>M&Uz*j+nH{Qw^oi9D z$=&LPE*h`ck`?2+02{h5#Mjz5i0>|2p$ap3Ng4neS~#2gz2!nD@41M zOdZ;S((`?n`^gxakD~EQscuA7-Qn)wisl{)U^YsoW|AS+CwHeq>SZdvcUPhiqjAvd 
z3%~fJPiKJVmlWELpFDlA`e?L)Xll*1^iJr^B`-SG$;BuJ-uYl*4sldqmMGy)_gnPm>z#XMRWI`%WaXgZ#g^~#AH1A8+}}Pt%7@&L$}Z(@iHvDm82VSwJB)I{ zJLRBlzMSAGG+lK&VD4!Www$_iyNO3r(lujpD5y=Hj6d%i z9{Y0v4X4!6)?L{Co=xVqT}->7uKcv``??`UqHk=YXO}_@X5ObGvBhuUG2rObJINI{ zDA*>t4-Au%qMr1GPjH}XCRo`Zy~r$;j#%KHl)HCT@hJ;sg8D}BnJKTF-(I&ivYBB& z(>kUw({}XxY^&iI`EH}vhpmq!qxp;0?L3wNLMA=?hH%U|{y{55Map!L+c*UU(PTHU zLaZ83UNp%NpYUeC=v$p`95vWu;~snY;U^wibs*>vzCV}1wDR?lKnysEB}j+io>B{Y z!~59uuRrQ?u&cSH@|R&8&>#DNA@adF3E%Ubl7ki;P8Tb*1xD6w&LO7X2U26k0%(j+ zS=pZ~2PN{|6cT-3_cz8HO0`ry^69iJ|HV~5v@4DXR*d)hx64+`_ltZ|FrSat;?TuQ>vDvX%U0gWmBo@6}(R>3tR5@?-?w~z0+C{OwiWhXKWv<+3Vt%jAga({6nn8z;9?;bL-Bc6dykfvtAo^CE zIa$K{-pU86-qv{JZ{N=I@pgUE0UznLZ(g+N{o5L=h*xEw>g4K9Kfn!TI8EShdj*FY3POl1B zyMv;U9M<0BdZyUk!L%pM?YZy-O(j8CEyJ898Rx`idpkEMdLVc$)V#KQN{%5S*@fG& zPG$7XK#Y#ZOkv}5WNSf?-wP{azD(htOJfv{SH4Y2(&G@tl-29Q9%vWHXpiLc*5**| zR0n(%A-{|1_vM`y9mA;P?$8|HNR#~IUOa3#45OW6Km{`ip3u-#`uynz+Fxt5PUv&LS!AA7*rqo;|z?wP@QmI2gof8%#t;mKD@N_krX zOta$pjadtR+<|pGg&z3zJe6^3e(-T3xmnZ6qZ*hSd+z<7IZpSPG(zVsXMW0F7tJeB znI!#u09bmtSMV^gzH=J0B|U*(f}BJZ!(sAt@&A3%X4bJ#yJ)@;8M{ z8;vTDa$|nDvY<}sg;oZxhI>klahY`c<f-IV4$V1!u&aYl2Cq?L_)FPHX@XJw<89Wg;!(G|R;7bJRNVPXv}S(c`x6_s+aO@P zL}=Ss$I#|)a-|j=-fc8|Aazqn7vI(6&w8`Xyt^1E@pTZTnf4EWsfT&d_bVJ9 zYBxU41X7 z#;!_5C>}6d`x_1GAusER_s)dJyvVbNw*Qs3{p-Ic&j68^ozOp2vuA7{0hd|BV&DF^ zB>Tvi+yT@*QxQ|FYnJTa`WJwFA!mV^P2U>BDmtxU6;C;Pl_fa=_C)Yy(tS zf`(uI>G8t~Tu#!%ir+kN^K4@FP%`C3(ocx9j{d;7|Rrj!My^=RyAml9_R+ literal 0 HcmV?d00001 diff --git a/docs/project/release-process.md b/docs/project/release-process.md index f7102d13c3..e9f3295d91 100644 --- a/docs/project/release-process.md +++ b/docs/project/release-process.md @@ -28,3 +28,30 @@ For Feast maintainers, these are the concrete steps for making a new release. 4. Try the dry run first with your personal access token. If this succeeds, uncheck `Dry Run` and run the release workflow. 5. All of the jobs should succeed besides the UI job which needs to be released separately. 
Ping a maintainer on Slack to run the UI release manually. 6. Try to install the feast release in your local environment and test out the `feast init` -> `feast apply` workflow to verify as a sanity check that the release worked correctly. + +### (for minor releases) Post-release steps +1. Create a new branch based on master (i.e. v0.22-branch) and push to the main Feast repo. This will be where cherry-picks go for future patch releases and where documentation will point. +2. Write a summary of the release in the GitHub release + 1. By default, Semantic Release will pull in messages from commits (features vs fixes, etc). But this is hard to digest still, so it helps to have a high level overview. + +### Update documentation + +In the Feast Gitbook (ask [Danny Chiao](https://tectonfeast.slack.com/team/U029405HFEU) in Slack for access): +1. Create a new space within the Feast collection +2. Go to the overflow menu on the top -> Synchronize with Git + 1. Specify GitHub as the provider + + ![](new_branch_part_1.png) + 2. Configure to point to the new release branch + + ![](new_branch_part_2.png) +3. Publish the new page for this branch as part of the collection + + ![](new_branch_part_3.png) +4. Go back to the main Feast collection and go to the overflow menu -> "Customize collection" + + ![](new_branch_part_3.png) +5. Configure the default space to be your new branch and save + + ![](new_branch_part_5.png) +6. 
Verify on docs.feast.dev that this new space is the default (this may take a few minutes to propagate, and your browser cache may be caching the old branch as the default) \ No newline at end of file diff --git a/docs/project/versioning-policy.md b/docs/project/versioning-policy.md index 8e51676355..b1ff2c75e7 100644 --- a/docs/project/versioning-policy.md +++ b/docs/project/versioning-policy.md @@ -23,24 +23,18 @@ In general, unless you're committing code that only applies to a particular rele The following table shows the **status** \(stable, beta, or alpha\) of Feast components. -Application status indicators for Feast: +Component status indicators for Feast: * **Stable** means that the component has reached a sufficient level of stability and adoption that the Feast community has deemed the component stable. Please see the stability criteria below. * **Beta** means that the component is working towards a version 1.0 release. Beta does not mean a component is unstable, it simply means the component has not met the full criteria of stability. * **Alpha** means that the component is in the early phases of development and/or integration into Feast. -| Application | Status | Notes | -| :--- | :--- | :--- | -| [Feast Serving](https://github.com/feast-dev/feast-java) | Beta | APIs are considered stable and will not have breaking changes within 3 minor versions. 
| -| [Feast Core](https://github.com/feast-dev/feast-java) | Beta | At risk of deprecation | -| [Feast Java Client](https://github.com/feast-dev/feast-java) | Beta | | -| [Feast Python SDK](https://github.com/feast-dev/feast) | Beta | | -| [Feast Go Client](https://github.com/feast-dev/feast) | Beta | | -| [Feast Spark Python SDK](https://github.com/feast-dev/feast-spark) | Alpha | | -| [Feast Spark Launchers](https://github.com/feast-dev/feast-spark) | Alpha | | -| [Feast Job Service](https://github.com/feast-dev/feast-spark) | Alpha | Scheduled for deprecation | -| [Feast Helm Chart](https://github.com/feast-dev/feast-helm-charts) | Beta | | -| | | | +| Component | Status | Notes | +|:---------------------------------------------------------------------------------|:-------| :--- | +| [Feast Python SDK](https://github.com/feast-dev/feast/tree/master/sdk/python) | Stable | | +| [Feast Go Feature Server](https://github.com/feast-dev/feast/tree/master/) | Beta | | +| [Feast Java Feature Server](https://github.com/feast-dev/feast/tree/master/java) | Alpha | | +| | | | Criteria for reaching _**stable**_ status: diff --git a/docs/roadmap.md b/docs/roadmap.md index 8461256a15..ae46eb9005 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -4,7 +4,7 @@ The list below contains the functionality that contributors are planning to deve * Items below that are in development (or planned for development) will be indicated in parentheses. * We welcome contribution to all items in the roadmap! -* Want to speak to a Feast contributor? We are more than happy to jump on a call. Please schedule a time using [Calendly](https://calendly.com/d/x2ry-g5bb/meet-with-feast-team). +* Have questions about the roadmap? 
Go to the Slack channel to ask on #feast-development * **Data Sources** * [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake) @@ -16,7 +16,6 @@ The list below contains the functionality that contributors are planning to deve * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/data-sources/postgres) * [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/data-sources/spark) * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) - * [ ] HTTP source * **Offline Stores** * [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake) * [x] [Redshift](https://docs.feast.dev/reference/offline-stores/redshift) @@ -49,26 +48,17 @@ The list below contains the functionality that contributors are planning to deve * **Deployments** * [x] AWS Lambda (Alpha release. See [RFC](https://docs.google.com/document/d/1eZWKWzfBif66LDN32IajpaG-j82LSHCCOzY6R7Ax7MI/edit)) * [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) - * [ ] Cloud Run - * [ ] KNative * **Feature Serving** * [x] Python Client - * [x] REST Feature Server (Python) (Alpha release. See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) - * [x] gRPC Feature Server (Java) (See [#1497](https://github.com/feast-dev/feast/issues/1497)) - * [x] Push API - * [ ] Java Client - * [ ] Go Client - * [ ] Delete API - * [ ] Feature Logging (for training) + * [x] REST Feature Server (Python) (See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) + * [x] REST / gRPC Feature Server (Go) (Alpha release. See [docs](https://docs.feast.dev/reference/feature-servers/go-feature-retrieval) + * [x] gRPC Feature Server (Java) (Alpha release. 
See [#1497](https://github.com/feast-dev/feast/issues/1497)) * **Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** * [x] Data profiling and validation (Great Expectations) - * [ ] Training-serving skew detection (in progress) - * [ ] Metric production - * [ ] Drift detection * **Feature Discovery and Governance** * [x] Python SDK for browsing feature registry * [x] CLI for browsing feature registry * [x] Model-centric feature tracking (feature services) * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) - * [x] Feast Web UI (Alpha release. See [documentation](https://docs.feast.dev/reference/alpha-web-ui)) - * [ ] REST API for browsing feature registry + * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) + * [x] Feast Web UI (Alpha release. See [documentation](https://docs.feast.dev/reference/alpha-web-ui)) \ No newline at end of file diff --git a/sdk/python/tests/unit/infra/test_key_encoding_utils.py b/sdk/python/tests/unit/infra/test_key_encoding_utils.py index 449d6819a1..df691ea21e 100644 --- a/sdk/python/tests/unit/infra/test_key_encoding_utils.py +++ b/sdk/python/tests/unit/infra/test_key_encoding_utils.py @@ -9,14 +9,14 @@ def test_serialize_entity_key(): # Should be fine serialize_entity_key( EntityKeyProto( - join_keys=["user"], entity_values=[ValueProto(int64_val=int(2 ** 15))] + join_keys=["user"], entity_values=[ValueProto(int64_val=int(2**15))] ), entity_key_serialization_version=2, ) # True int64, but should also be fine. 
serialize_entity_key( EntityKeyProto( - join_keys=["user"], entity_values=[ValueProto(int64_val=int(2 ** 31))] + join_keys=["user"], entity_values=[ValueProto(int64_val=int(2**31))] ), entity_key_serialization_version=2, ) @@ -25,6 +25,6 @@ def test_serialize_entity_key(): with pytest.raises(BaseException): serialize_entity_key( EntityKeyProto( - join_keys=["user"], entity_values=[ValueProto(int64_val=int(2 ** 31))] + join_keys=["user"], entity_values=[ValueProto(int64_val=int(2**31))] ), ) From a965af988f9dadb59843ac34d031eeee1d6ca1a7 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Wed, 20 Jul 2022 15:48:09 -0700 Subject: [PATCH 45/73] docs: Add docs for batch materialization engine (#2959) Signed-off-by: Achal Shah --- docs/SUMMARY.md | 2 + .../architecture-and-components/README.md | 3 +- .../batch-materialization-engine.md | 10 ++ docs/getting-started/concepts/README.md | 2 +- ...reating-a-custom-materialization-engine.md | 125 ++++++++++++++++++ .../feature-repository/feature-store-yaml.md | 1 + sdk/python/docs/index.rst | 23 ++++ sdk/python/docs/source/conf.py | 2 +- .../feast.infra.materialization.lambda.rst | 29 ++++ .../source/feast.infra.materialization.rst | 8 ++ sdk/python/docs/source/index.rst | 20 +++ .../feast/infra/materialization/__init__.py | 3 +- 12 files changed, 223 insertions(+), 5 deletions(-) create mode 100644 docs/getting-started/architecture-and-components/batch-materialization-engine.md create mode 100644 docs/how-to-guides/creating-a-custom-materialization-engine.md create mode 100644 sdk/python/docs/source/feast.infra.materialization.lambda.rst diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 23049455e3..6a216a332f 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -25,6 +25,7 @@ * [Offline store](getting-started/architecture-and-components/offline-store.md) * [Online store](getting-started/architecture-and-components/online-store.md) * [Provider](getting-started/architecture-and-components/provider.md) + * [Batch Materialization 
Engine](getting-started/architecture-and-components/batch-materialization-engine.md)
 * [Learning by example](getting-started/feast-workshop.md)
 * [Third party integrations](getting-started/third-party-integrations.md)
 * [FAQ](getting-started/faq.md)
@@ -53,6 +54,7 @@
 * [Deploying a Java feature server on Kubernetes](how-to-guides/fetching-java-features-k8s.md)
 * [Upgrading from Feast 0.9](https://docs.google.com/document/u/1/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0/edit)
 * [Adding a custom provider](how-to-guides/creating-a-custom-provider.md)
+* [Adding a custom batch materialization engine](how-to-guides/creating-a-custom-materialization-engine.md)
 * [Adding a new online store](how-to-guides/adding-support-for-a-new-online-store.md)
 * [Adding a new offline store](how-to-guides/adding-a-new-offline-store.md)
 * [Adding or reusing tests](how-to-guides/adding-or-reusing-tests.md)
diff --git a/docs/getting-started/architecture-and-components/README.md b/docs/getting-started/architecture-and-components/README.md
index c3286b8315..8a6e181ea7 100644
--- a/docs/getting-started/architecture-and-components/README.md
+++ b/docs/getting-started/architecture-and-components/README.md
@@ -12,5 +12,4 @@
 
 {% page-ref page="provider.md" %}
 
-
-
+{% page-ref page="batch-materialization-engine.md" %}
diff --git a/docs/getting-started/architecture-and-components/batch-materialization-engine.md b/docs/getting-started/architecture-and-components/batch-materialization-engine.md
new file mode 100644
index 0000000000..da21bd4c59
--- /dev/null
+++ b/docs/getting-started/architecture-and-components/batch-materialization-engine.md
@@ -0,0 +1,10 @@
+# Batch Materialization Engine
+
+A batch materialization engine is a component of Feast that's responsible for moving data from the offline store into the online store.
+
+A materialization engine abstracts over specific technologies or frameworks that are used to materialize data. It allows users to use a pure local serialized approach (which is the default LocalMaterializationEngine), or delegates the materialization to separate components (e.g. AWS Lambda, as implemented by the LambdaMaterializationEngine).
+
+If the built-in engines are not sufficient, you can create your own custom materialization engine. Please see [this guide](../../how-to-guides/creating-a-custom-materialization-engine.md) for more details.
+
+Please see [feature\_store.yaml](../../reference/feature-repository/feature-store-yaml.md#overview) for configuring the batch materialization engine.
+
diff --git a/docs/getting-started/concepts/README.md b/docs/getting-started/concepts/README.md
index 6f2f64955d..0fc415f059 100644
--- a/docs/getting-started/concepts/README.md
+++ b/docs/getting-started/concepts/README.md
@@ -18,4 +18,4 @@
 
 {% page-ref page="point-in-time-joins.md" %}
 
-{% page-ref page="registry.md" %}
\ No newline at end of file
+{% page-ref page="registry.md" %}
diff --git a/docs/how-to-guides/creating-a-custom-materialization-engine.md b/docs/how-to-guides/creating-a-custom-materialization-engine.md
new file mode 100644
index 0000000000..935ac3dc99
--- /dev/null
+++ b/docs/how-to-guides/creating-a-custom-materialization-engine.md
@@ -0,0 +1,125 @@
+# Adding a custom materialization engine
+
+### Overview
+
+Feast batch materialization operations (`materialize` and `materialize-incremental`) execute through a `BatchMaterializationEngine`.
+
+Custom batch materialization engines allow Feast users to extend Feast to customize the materialization process. Examples include:
+
+* Setting up custom materialization-specific infrastructure during `feast apply` (e.g. setting up Spark clusters or Lambda Functions)
+* Launching custom batch ingestion \(materialization\) jobs \(Spark, Beam, AWS Lambda\)
+* Tearing down custom materialization-specific infrastructure during `feast teardown` (e.g. 
tearing down Spark clusters, or deleting Lambda Functions) + +Feast comes with built-in materialization engines, e.g., `LocalMaterializationEngine`, and an experimental `LambdaMaterializationEngine`. However, users can develop their own materialization engines by creating a class that implements the contract in the [BatchMaterializationEngine class](https://github.com/feast-dev/feast/blob/6d7b38a39024b7301c499c20cf4e7aef6137c47c/sdk/python/feast/infra/materialization/batch_materialization_engine.py#L72). + +### Guide + +The fastest way to add custom logic to Feast is to extend an existing materialization engine. The most generic engine is the `LocalMaterializationEngine` which contains no cloud-specific logic. The guide that follows will extend the `LocalMaterializationEngine` with operations that print text to the console. It is up to you as a developer to add your custom code to the engine methods, but the guide below will provide the necessary scaffolding to get you started. + +#### Step 1: Define an Engine class + +The first step is to define a custom materialization engine class. We've created the `MyCustomEngine` below. 
+ +```python +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.batch_feature_view import BatchFeatureView +from feast.stream_feature_view import StreamFeatureView +from feast.infra.materialization import LocalMaterializationEngine, LocalMaterializationJob, MaterializationTask +from feast.infra.offline_stores.offline_store import OfflineStore +from feast.infra.online_stores.online_store import OnlineStore +from feast.repo_config import RepoConfig + + +class MyCustomEngine(LocalMaterializationEngine): + def __init__( + self, + *, + repo_config: RepoConfig, + offline_store: OfflineStore, + online_store: OnlineStore, + **kwargs, + ): + super().__init__( + repo_config=repo_config, + offline_store=offline_store, + online_store=online_store, + **kwargs, + ) + + def update( + self, + project: str, + views_to_delete: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + views_to_keep: Sequence[ + Union[BatchFeatureView, StreamFeatureView, FeatureView] + ], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + ): + print("Creating new infrastructure is easy here!") + pass + + def materialize( + self, registry, tasks: List[MaterializationTask] + ) -> List[LocalMaterializationJob]: + print("Launching custom batch jobs or multithreading things is pretty easy...") + return [ + self._materialize_one( + registry, + task.feature_view, + task.start_time, + task.end_time, + task.project, + task.tqdm_builder, + ) + for task in tasks + ] + +``` + +Notice how in the above engine we have only overwritten two of the methods on the `LocalMaterializationEngine`, namely `update` and `materialize`. These two methods are convenient to replace if you are planning to launch custom batch jobs. 
+ +#### Step 2: Configuring Feast to use the engine + +Configure your [feature\_store.yaml](../reference/feature-repository/feature-store-yaml.md) file to point to your new engine class: + +```yaml +project: repo +registry: registry.db +batch_engine: feast_custom_engine.MyCustomEngine +online_store: + type: sqlite + path: online_store.db +offline_store: + type: file +``` + +Notice how the `batch_engine` field above points to the module and class where your engine can be found. + +#### Step 3: Using the engine + +Now you should be able to use your engine by running a Feast command: + +```bash +feast apply +``` + +```text +Registered entity driver_id +Registered feature view driver_hourly_stats +Deploying infrastructure for driver_hourly_stats +Creating new infrastructure is easy here! +``` + +It may also be necessary to add the module root path to your `PYTHONPATH` as follows: + +```bash +PYTHONPATH=$PYTHONPATH:/home/my_user/my_custom_engine feast apply +``` + +That's it. You should now have a fully functional custom engine! diff --git a/docs/reference/feature-repository/feature-store-yaml.md b/docs/reference/feature-repository/feature-store-yaml.md index fa10149cfe..a87e09ba43 100644 --- a/docs/reference/feature-repository/feature-store-yaml.md +++ b/docs/reference/feature-repository/feature-store-yaml.md @@ -24,5 +24,6 @@ The following top-level configuration options exist in the `feature_store.yaml` * **online_store** — Configures the online store. * **offline_store** — Configures the offline store. * **project** — Defines a namespace for the entire feature store. Can be used to isolate multiple deployments in a single installation of Feast. Should only contain letters, numbers, and underscores. +* **engine** - Configures the batch materialization engine. Please see the [RepoConfig](https://rtd.feast.dev/en/latest/#feast.repo_config.RepoConfig) API reference for the full list of configuration options. 
diff --git a/sdk/python/docs/index.rst b/sdk/python/docs/index.rst index 9297901c33..07b9d9a77e 100644 --- a/sdk/python/docs/index.rst +++ b/sdk/python/docs/index.rst @@ -250,18 +250,21 @@ Sqlite Online Store .. automodule:: feast.infra.online_stores.sqlite :members: + :noindex: Datastore Online Store ---------------------- .. automodule:: feast.infra.online_stores.datastore :members: + :noindex: DynamoDB Online Store --------------------- .. automodule:: feast.infra.online_stores.dynamodb :members: + :noindex: Redis Online Store ------------------ @@ -283,3 +286,23 @@ HBase Online Store .. automodule:: feast.infra.online_stores.contrib.hbase_online_store.hbase :members: :noindex: + + +Batch Materialization Engine +============================ + +.. automodule:: feast.infra.materialization + :members: BatchMaterializationEngine, MaterializationJob, MaterializationTask + +Local Engine +------------ +.. autoclass:: feast.infra.materialization.LocalMaterializationEngine + :members: + :noindex: + +(Alpha) Lambda Based Engine +--------------------------- + +.. autoclass:: feast.infra.materialization.lambda.lambda_engine + :members: + :noindex: diff --git a/sdk/python/docs/source/conf.py b/sdk/python/docs/source/conf.py index 8f873d21b6..b311a19664 100644 --- a/sdk/python/docs/source/conf.py +++ b/sdk/python/docs/source/conf.py @@ -115,7 +115,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ["_static"] +html_static_path = [] # -- Options for HTMLHelp output ------------------------------------------ diff --git a/sdk/python/docs/source/feast.infra.materialization.lambda.rst b/sdk/python/docs/source/feast.infra.materialization.lambda.rst new file mode 100644 index 0000000000..7ca1d44314 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.materialization.lambda.rst @@ -0,0 +1,29 @@ +feast.infra.materialization.lambda package +========================================== + +Submodules +---------- + +feast.infra.materialization.lambda.app module +--------------------------------------------- + +.. automodule:: feast.infra.materialization.lambda.app + :members: + :undoc-members: + :show-inheritance: + +feast.infra.materialization.lambda.lambda\_engine module +-------------------------------------------------------- + +.. automodule:: feast.infra.materialization.lambda.lambda_engine + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.materialization.lambda + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.materialization.rst b/sdk/python/docs/source/feast.infra.materialization.rst index 49fdc404cb..ff3e1cf135 100644 --- a/sdk/python/docs/source/feast.infra.materialization.rst +++ b/sdk/python/docs/source/feast.infra.materialization.rst @@ -1,6 +1,14 @@ feast.infra.materialization package =================================== +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.infra.materialization.lambda + Submodules ---------- diff --git a/sdk/python/docs/source/index.rst b/sdk/python/docs/source/index.rst index 9297901c33..bc034c295d 100644 --- a/sdk/python/docs/source/index.rst +++ b/sdk/python/docs/source/index.rst @@ -283,3 +283,23 @@ HBase Online Store .. 
automodule:: feast.infra.online_stores.contrib.hbase_online_store.hbase :members: :noindex: + + +Batch Materialization Engine +============================ + +.. automodule:: feast.infra.materialization + :members: BatchMaterializationEngine, MaterializationJob, MaterializationTask + +Local Engine +------------ +.. autoclass:: feast.infra.materialization.LocalMaterializationEngine + :members: + :noindex: + +(Alpha) Lambda Based Engine +--------------------------- + +.. autoclass:: feast.infra.materialization.lambda.lambda_engine + :members: + :noindex: diff --git a/sdk/python/feast/infra/materialization/__init__.py b/sdk/python/feast/infra/materialization/__init__.py index 6be653b26e..815f98739b 100644 --- a/sdk/python/feast/infra/materialization/__init__.py +++ b/sdk/python/feast/infra/materialization/__init__.py @@ -3,11 +3,12 @@ MaterializationJob, MaterializationTask, ) -from .local_engine import LocalMaterializationEngine +from .local_engine import LocalMaterializationEngine, LocalMaterializationJob __all__ = [ "MaterializationJob", "MaterializationTask", "BatchMaterializationEngine", "LocalMaterializationEngine", + "LocalMaterializationJob", ] From 46d11bc04dc9d550864fe4a95d4b7c2aa81ba0b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Jul 2022 15:49:07 -0700 Subject: [PATCH 46/73] chore(deps): Bump terser from 5.10.0 to 5.14.2 in /ui (#2961) Bumps [terser](https://github.com/terser/terser) from 5.10.0 to 5.14.2. - [Release notes](https://github.com/terser/terser/releases) - [Changelog](https://github.com/terser/terser/blob/master/CHANGELOG.md) - [Commits](https://github.com/terser/terser/commits) --- updated-dependencies: - dependency-name: terser dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ui/yarn.lock | 61 +++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 46 insertions(+), 15 deletions(-) diff --git a/ui/yarn.lock b/ui/yarn.lock index 78c069b38a..ad31cbeac5 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -1476,15 +1476,37 @@ "@types/yargs" "^16.0.0" chalk "^4.0.0" +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/resolve-uri@^3.0.3": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.5.tgz#68eb521368db76d040a6315cdb24bf2483037b9c" - integrity sha512-VPeQ7+wH0itvQxnG+lIzWgkysKIr3L9sslimFW55rHMdGu/qCQ5z5h9zq4gI8uBtqkpHhsF4Z/OwExufUCThew== + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + 
"@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.11" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz#771a1d8d744eeb71b6adb35808e1a6c7b9b8c8ec" - integrity sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg== + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== "@jridgewell/trace-mapping@^0.3.0": version "0.3.4" @@ -1494,6 +1516,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.14" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" + integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@mapbox/hast-util-table-cell-style@^0.2.0": version "0.2.0" resolved "https://registry.yarnpkg.com/@mapbox/hast-util-table-cell-style/-/hast-util-table-cell-style-0.2.0.tgz#1003f59d54fae6f638cb5646f52110fb3da95b4d" @@ -2803,10 +2833,10 @@ acorn@^7.0.0, acorn@^7.1.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== -acorn@^8.2.4, acorn@^8.4.1, acorn@^8.7.0: - version "8.7.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" - integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== +acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, 
acorn@^8.7.0: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== address@^1.0.1, address@^1.1.2: version "1.1.2" @@ -9537,7 +9567,7 @@ source-map@^0.5.0, source-map@^0.5.3: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= -source-map@^0.7.3, source-map@~0.7.2: +source-map@^0.7.3: version "0.7.3" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== @@ -9970,12 +10000,13 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: terser "^5.7.2" terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: - version "5.10.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.10.0.tgz#b86390809c0389105eb0a0b62397563096ddafcc" - integrity sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA== + version "5.14.2" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" + integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA== dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" commander "^2.20.0" - source-map "~0.7.2" source-map-support "~0.5.20" test-exclude@^6.0.0: From 8534f69026d03e6e5964ef3e9bc69cc18397a879 Mon Sep 17 00:00:00 2001 From: "Ukjae Jeong (Jay)" Date: Thu, 21 Jul 2022 07:56:07 +0900 Subject: [PATCH 47/73] fix: Fix typo in CONTRIBUTING.md (#2955) fix: fix typo in CONTRIBUTING.md Signed-off-by: Jeong Ukjae --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9c25a835bd..a6148458a7 
100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -95,7 +95,7 @@ pip install --upgrade pip make build-ui ``` -5Install development dependencies for Feast Python SDK / CLI +5. Install development dependencies for Feast Python SDK / CLI ```sh pip install -e ".[dev]" ``` From 23c09c83bc530de830ba867b10ceb02f113db5d6 Mon Sep 17 00:00:00 2001 From: William Horton Date: Wed, 20 Jul 2022 18:57:05 -0400 Subject: [PATCH 48/73] feat: Add CustomSourceOptions to SavedDatasetStorage (#2958) Add CustomSourceOptions to SavedDatasetStorage I am implementing a custom OfflineStore and found that I couldn't use CustomSourceOptions in SavedDatasetStorage: currently you'd have to try to fit your custom options into one of the existing implemented Options structs Signed-off-by: William Horton --- protos/feast/core/SavedDataset.proto | 1 + 1 file changed, 1 insertion(+) diff --git a/protos/feast/core/SavedDataset.proto b/protos/feast/core/SavedDataset.proto index 353e925ad1..53f06f73a9 100644 --- a/protos/feast/core/SavedDataset.proto +++ b/protos/feast/core/SavedDataset.proto @@ -58,6 +58,7 @@ message SavedDatasetStorage { DataSource.SnowflakeOptions snowflake_storage = 7; DataSource.TrinoOptions trino_storage = 8; DataSource.SparkOptions spark_storage = 9; + DataSource.CustomSourceOptions custom_storage = 10; } } From 5e45228a406e6ee7f82e41cab7f734730ff2e73f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ivan=20Kri=C5=BEani=C4=87?= <38288854+ikrizanic@users.noreply.github.com> Date: Thu, 21 Jul 2022 07:14:03 +0200 Subject: [PATCH 49/73] fix: Add dummy alias to pull_all_from_table_or_query (#2956) * Add dummy alias to pull_all_from_table_or_query Signed-off-by: Ivan Krizanic * Fix lint error Signed-off-by: Ivan Krizanic --- .../offline_stores/contrib/postgres_offline_store/postgres.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py 
b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index 415a46dde7..28944df72e 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -214,7 +214,7 @@ def pull_all_from_table_or_query( query = f""" SELECT {field_string} - FROM {from_expression} + FROM {from_expression} AS paftoq_alias WHERE "{timestamp_field}" BETWEEN '{start_date}'::timestamptz AND '{end_date}'::timestamptz """ From 661c0535f34b042846562a3fb4cdab4ab4403459 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Thu, 21 Jul 2022 14:56:07 -0700 Subject: [PATCH 50/73] fix: Do not allow same column to be reused in data sources (#2965) * Do not allow same column to be reused in data sources Signed-off-by: Felix Wang * Fix bug Signed-off-by: Felix Wang --- sdk/python/feast/data_source.py | 7 +++++++ sdk/python/tests/unit/test_data_sources.py | 10 ++++++++++ 2 files changed, 17 insertions(+) diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index 6ab7934371..a1e44b3186 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -273,6 +273,13 @@ def __init__( ), DeprecationWarning, ) + if ( + self.timestamp_field + and self.timestamp_field == self.created_timestamp_column + ): + raise ValueError( + "Please do not use the same column for 'timestamp_field' and 'created_timestamp_column'." 
+ ) self.description = description or "" self.tags = tags or {} self.owner = owner or "" diff --git a/sdk/python/tests/unit/test_data_sources.py b/sdk/python/tests/unit/test_data_sources.py index 61891ccf1a..0b437e50b9 100644 --- a/sdk/python/tests/unit/test_data_sources.py +++ b/sdk/python/tests/unit/test_data_sources.py @@ -261,3 +261,13 @@ def test_proto_conversion(): assert DataSource.from_proto(kinesis_source.to_proto()) == kinesis_source assert DataSource.from_proto(push_source.to_proto()) == push_source assert DataSource.from_proto(request_source.to_proto()) == request_source + + +def test_column_conflict(): + with pytest.raises(ValueError): + _ = FileSource( + name="test_source", + path="test_path", + timestamp_field="event_timestamp", + created_timestamp_column="event_timestamp", + ) From ffab04c45183f7b8edf07aba47ddceeae75e9970 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Jul 2022 15:39:07 -0700 Subject: [PATCH 51/73] chore(deps): Bump terser from 5.13.1 to 5.14.2 in /sdk/python/feast/ui (#2962) Bumps [terser](https://github.com/terser/terser) from 5.13.1 to 5.14.2. - [Release notes](https://github.com/terser/terser/releases) - [Changelog](https://github.com/terser/terser/blob/master/CHANGELOG.md) - [Commits](https://github.com/terser/terser/commits) --- updated-dependencies: - dependency-name: terser dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/feast/ui/yarn.lock | 50 +++++++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 14 deletions(-) diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index f2fd12b4e5..b44fc5f51a 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -1617,25 +1617,47 @@ "@jridgewell/set-array" "^1.0.0" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/resolve-uri@^3.0.3": - version "3.0.7" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe" - integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA== + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== "@jridgewell/set-array@^1.0.0": version "1.1.1" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity 
sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.13" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c" - integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w== + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== "@jridgewell/trace-mapping@^0.3.9": - version "0.3.12" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.12.tgz#ccd8cd83ad894bae98a79eecd6a885b211bfe217" - integrity sha512-6GMdw8fZlZjs9CJONrWeWyjl8zYqbyOMSxS9FABnEw3i+wz99SESjWMWRRIsbIp8HVsMeXggi5b7+a9qO6W1fQ== + version "0.3.14" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" + integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== dependencies: "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" @@ -9228,7 +9250,7 @@ source-map@^0.7.3: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== -source-map@^0.8.0-beta.0, 
source-map@~0.8.0-beta.0: +source-map@^0.8.0-beta.0: version "0.8.0-beta.0" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== @@ -9633,13 +9655,13 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: terser "^5.7.2" terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: - version "5.13.1" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.13.1.tgz#66332cdc5a01b04a224c9fad449fc1a18eaa1799" - integrity sha512-hn4WKOfwnwbYfe48NgrQjqNOH9jzLqRcIfbYytOXCOv46LBfWr9bDS17MQqOi+BWGD0sJK3Sj5NC/gJjiojaoA== + version "5.14.2" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" + integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA== dependencies: + "@jridgewell/source-map" "^0.3.2" acorn "^8.5.0" commander "^2.20.0" - source-map "~0.8.0-beta.0" source-map-support "~0.5.20" test-exclude@^6.0.0: From a36a6950b34d718ad328b4faca0c178fb23a3100 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Thu, 21 Jul 2022 16:14:07 -0700 Subject: [PATCH 52/73] feat: Add Go option to `feast serve` command (#2966) Add Go option to `feast serve` command Signed-off-by: Felix Wang --- sdk/python/feast/cli.py | 33 ++++++++++++++++++++++++++------- 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 99e084b666..86a78e289b 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -118,21 +118,24 @@ def version(): "-h", type=click.STRING, default="0.0.0.0", - help="Specify a host for the server [default: 0.0.0.0]", + show_default=True, + help="Specify a host for the server", ) @click.option( "--port", "-p", type=click.INT, default=8888, - help="Specify a port for the server [default: 8888]", + show_default=True, + help="Specify a port for 
the server", ) @click.option( "--registry_ttl_sec", "-r", - help="Number of seconds after which the registry is refreshed. Default is 5 seconds.", + help="Number of seconds after which the registry is refreshed", type=int, default=5, + show_default=True, ) @click.pass_context def ui(ctx: click.Context, host: str, port: int, registry_ttl_sec: int): @@ -610,14 +613,16 @@ def init_command(project_directory, minimal: bool, template: str): "-h", type=click.STRING, default="127.0.0.1", - help="Specify a host for the server [default: 127.0.0.1]", + show_default=True, + help="Specify a host for the server", ) @click.option( "--port", "-p", type=click.INT, default=6566, - help="Specify a port for the server [default: 6566]", + show_default=True, + help="Specify a port for the server", ) @click.option( "--type", @@ -625,16 +630,25 @@ def init_command(project_directory, minimal: bool, template: str): "type_", type=click.STRING, default="http", - help="Specify a server type: 'http' or 'grpc' [default: http]", + show_default=True, + help="Specify a server type: 'http' or 'grpc'", +) +@click.option( + "--go", + is_flag=True, + show_default=True, + help="Use Go to serve", ) @click.option( "--no-access-log", is_flag=True, - help="Disable the Uvicorn access log.", + show_default=True, + help="Disable the Uvicorn access log", ) @click.option( "--no-feature-log", is_flag=True, + show_default=True, help="Disable logging served features", ) @click.pass_context @@ -643,6 +657,7 @@ def serve_command( host: str, port: int, type_: str, + go: bool, no_access_log: bool, no_feature_log: bool, ): @@ -651,6 +666,10 @@ def serve_command( cli_check_repo(repo) store = FeatureStore(repo_path=str(repo)) + if go: + # Turn on Go feature retrieval. 
+ store.config.go_feature_retrieval = True + store.serve(host, port, type_, no_access_log, no_feature_log) From a233d3fc61c8a77d97e071addf31410cdae241fc Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Thu, 21 Jul 2022 22:16:09 -0700 Subject: [PATCH 53/73] chore: Fix test asserts for offline store write and improve some errors (#2964) * chore: Fix test asserts for offline store write and improve some error messages Signed-off-by: Achal Shah * wait for write to finish Signed-off-by: Achal Shah * wait for write to finish Signed-off-by: Achal Shah * detailed error messages Signed-off-by: Achal Shah * sort and reset index Signed-off-by: Achal Shah * fix Signed-off-by: Achal Shah --- sdk/python/feast/feature_store.py | 3 +- .../feast/infra/offline_stores/bigquery.py | 6 +- sdk/python/feast/usage.py | 2 +- .../offline_store/test_offline_write.py | 58 ++++++++++--------- 4 files changed, 37 insertions(+), 32 deletions(-) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index ce2c98e1ea..4677d475f3 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -1131,8 +1131,7 @@ def create_saved_dataset( if not from_.metadata: raise ValueError( - "RetrievalJob must contains metadata. " - "Use RetrievalJob produced by get_historical_features" + f"The RetrievalJob {type(from_)} must implement the metadata property." 
) dataset = SavedDataset( diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 6c2bef757a..e3791f08c7 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -306,7 +306,7 @@ def write_logged_features( file_obj=f, destination=destination.table, job_config=job_config, - ) + ).result() return @@ -319,7 +319,7 @@ def write_logged_features( file_obj=parquet_temp_file, destination=destination.table, job_config=job_config, - ) + ).result() @staticmethod def offline_write_batch( @@ -373,7 +373,7 @@ def offline_write_batch( file_obj=parquet_temp_file, destination=feature_view.batch_source.table, job_config=job_config, - ) + ).result() class BigQueryRetrievalJob(RetrievalJob): diff --git a/sdk/python/feast/usage.py b/sdk/python/feast/usage.py index 471a1b9671..5e78aa52d2 100644 --- a/sdk/python/feast/usage.py +++ b/sdk/python/feast/usage.py @@ -35,7 +35,7 @@ USAGE_ENDPOINT = "https://usage.feast.dev" _logger = logging.getLogger(__name__) -_executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) +_executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) _is_enabled = os.getenv(FEAST_USAGE, default=DEFAULT_FEAST_USAGE_VALUE) == "True" diff --git a/sdk/python/tests/integration/offline_store/test_offline_write.py b/sdk/python/tests/integration/offline_store/test_offline_write.py index 964793ffaa..b8c465946d 100644 --- a/sdk/python/tests/integration/offline_store/test_offline_write.py +++ b/sdk/python/tests/integration/offline_store/test_offline_write.py @@ -126,11 +126,12 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour "created": [ts, ts], }, ) + first_df = first_df.astype({"conv_rate": "float32", "acc_rate": "float32"}) store.write_to_offline_store( driver_stats.name, first_df, allow_registry_cache=False ) - after_write_df = store.get_historical_features( + after_write_df: pd.DataFrame = 
store.get_historical_features( entity_df=entity_df, features=[ "driver_stats:conv_rate", @@ -139,21 +140,26 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour ], full_feature_names=False, ).to_df() - - assert len(after_write_df) == len(first_df) - assert np.where( - after_write_df["conv_rate"].reset_index(drop=True) - == first_df["conv_rate"].reset_index(drop=True) - ) - assert np.where( - after_write_df["acc_rate"].reset_index(drop=True) - == first_df["acc_rate"].reset_index(drop=True) + after_write_df = after_write_df.sort_values("event_timestamp").reset_index( + drop=True ) - assert np.where( - after_write_df["avg_daily_trips"].reset_index(drop=True) - == first_df["avg_daily_trips"].reset_index(drop=True) + + print(f"After: {after_write_df}\nFirst: {first_df}") + print( + f"After: {after_write_df['conv_rate'].reset_index(drop=True)}\nFirst: {first_df['conv_rate'].reset_index(drop=True)}" ) + assert len(after_write_df) == len(first_df) + for field in ["conv_rate", "acc_rate", "avg_daily_trips"]: + assert np.equal( + after_write_df[field].reset_index(drop=True), + first_df[field].reset_index(drop=True), + ).all(), ( + f"Field: {field}\n" + f"After: {after_write_df[field].reset_index(drop=True)}\n" + f"First: {first_df[field].reset_index(drop=True)}" + ) + second_df = pd.DataFrame.from_dict( { "event_timestamp": [ts + timedelta(hours=5), ts + timedelta(hours=6)], @@ -164,6 +170,7 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour "created": [ts, ts], }, ) + second_df = second_df.astype({"conv_rate": "float32", "acc_rate": "float32"}) store.write_to_offline_store( driver_stats.name, second_df, allow_registry_cache=False @@ -190,18 +197,17 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour ], full_feature_names=False, ).to_df() - + after_write_df = after_write_df.sort_values("event_timestamp").reset_index( + drop=True + ) expected_df = pd.concat([first_df, second_df]) 
assert len(after_write_df) == len(expected_df) - assert np.where( - after_write_df["conv_rate"].reset_index(drop=True) - == expected_df["conv_rate"].reset_index(drop=True) - ) - assert np.where( - after_write_df["acc_rate"].reset_index(drop=True) - == expected_df["acc_rate"].reset_index(drop=True) - ) - assert np.where( - after_write_df["avg_daily_trips"].reset_index(drop=True) - == expected_df["avg_daily_trips"].reset_index(drop=True) - ) + for field in ["conv_rate", "acc_rate", "avg_daily_trips"]: + assert np.equal( + after_write_df[field].reset_index(drop=True), + expected_df[field].reset_index(drop=True), + ).all(), ( + f"Field: {field}\n" + f"After: {after_write_df[field].reset_index(drop=True)}\n" + f"First: {expected_df[field].reset_index(drop=True)}" + ) From aa2a86acf4b9d2791eb3aef07e829eb2441b5b8d Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Thu, 21 Jul 2022 22:32:07 -0700 Subject: [PATCH 54/73] chore: Add separate `go_feature_serving` flag (#2968) Add separate `go_feature_serving` flag to distinguish between Go feature serving and embedded Go code Signed-off-by: Felix Wang --- .github/workflows/build_wheels.yml | 2 +- sdk/python/feast/cli.py | 2 +- sdk/python/feast/feature_store.py | 4 ++-- sdk/python/feast/repo_config.py | 4 ++++ sdk/python/tests/conftest.py | 4 ++-- sdk/python/tests/integration/e2e/test_go_feature_server.py | 2 +- .../feature_repos/integration_test_repo_config.py | 6 +++--- .../tests/integration/feature_repos/repo_configuration.py | 2 +- 8 files changed, 15 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 4a6bc34d09..c47a8ec5c3 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -245,5 +245,5 @@ jobs: feast apply echo "$TEST_SCRIPT" > run-and-wait.sh pip install cffi - printf "\ngo_feature_retrieval: True" >> feature_store.yaml + printf "\ngo_feature_serving: True" >> feature_store.yaml bash run-and-wait.sh feast serve \ 
No newline at end of file diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 86a78e289b..153c1a5ddd 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -668,7 +668,7 @@ def serve_command( if go: # Turn on Go feature retrieval. - store.config.go_feature_retrieval = True + store.config.go_feature_serving = True store.serve(host, port, type_, no_access_log, no_feature_log) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 4677d475f3..c4ccc9a648 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -1564,7 +1564,7 @@ def _get_online_features( for k, v in entity_values.items() } - # If Go feature server is enabled, send request to it instead of going through regular Python logic + # If the embedded Go code is enabled, send request to it instead of going through regular Python logic. if self.config.go_feature_retrieval: self._lazy_init_go_server() @@ -2217,7 +2217,7 @@ def serve( ) -> None: """Start the feature consumption server locally on a given port.""" type_ = type_.lower() - if self.config.go_feature_retrieval: + if self.config.go_feature_serving: # Start go server instead of python if the flag is enabled self._lazy_init_go_server() enable_logging = ( diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 2947f10f54..587907b284 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -139,7 +139,11 @@ class RepoConfig(FeastBaseModel): repo_path: Optional[Path] = None + go_feature_serving: Optional[bool] = False + """ If True, use the Go feature server instead of the Python feature server. """ + go_feature_retrieval: Optional[bool] = False + """ If True, use the embedded Go code to retrieve features instead of the Python SDK. 
""" entity_key_serialization_version: StrictInt = 1 """ Entity key serialization version: This version is used to control what serialization scheme is diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index e296aeedbd..7da791f64f 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -268,7 +268,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc): ) if "goserver" in markers: - extra_dimensions.append({"go_feature_retrieval": True}) + extra_dimensions.append({"go_feature_serving": True}) configs = [] if offline_stores: @@ -283,7 +283,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc): **dim, } # temporary Go works only with redis - if config.get("go_feature_retrieval") and ( + if config.get("go_feature_serving") and ( not isinstance(online_store, dict) or online_store["type"] != "redis" ): diff --git a/sdk/python/tests/integration/e2e/test_go_feature_server.py b/sdk/python/tests/integration/e2e/test_go_feature_server.py index 1430d9cdf9..dc83246c2d 100644 --- a/sdk/python/tests/integration/e2e/test_go_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_go_feature_server.py @@ -59,7 +59,7 @@ def initialized_registry(environment, universal_data_sources): def server_port(environment, server_type: str): - if not environment.test_repo_config.go_feature_retrieval: + if not environment.test_repo_config.go_feature_serving: pytest.skip("Only for Go path") fs = environment.feature_store diff --git a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py index d2e0f70ba2..4662734383 100644 --- a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py +++ b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py @@ -37,7 +37,7 @@ class IntegrationTestRepoConfig: full_feature_names: bool = True infer_features: bool = False python_feature_server: bool = False - 
go_feature_retrieval: bool = False + go_feature_serving: bool = False def __repr__(self) -> str: if not self.online_store_creator: @@ -61,7 +61,7 @@ def __repr__(self) -> str: f"{self.offline_store_creator.__name__.split('.')[-1].replace('DataSourceCreator', '')}", online_store_type, f"python_fs:{self.python_feature_server}", - f"go_fs:{self.go_feature_retrieval}", + f"go_fs:{self.go_feature_serving}", ] ) @@ -77,6 +77,6 @@ def __eq__(self, other): and self.online_store == other.online_store and self.offline_store_creator == other.offline_store_creator and self.online_store_creator == other.online_store_creator - and self.go_feature_retrieval == other.go_feature_retrieval + and self.go_feature_serving == other.go_feature_serving and self.python_feature_server == other.python_feature_server ) diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 672a5bdcfa..75d6509f3c 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -420,7 +420,7 @@ def construct_test_environment( batch_engine=test_repo_config.batch_engine, repo_path=repo_dir_name, feature_server=feature_server, - go_feature_retrieval=test_repo_config.go_feature_retrieval, + go_feature_serving=test_repo_config.go_feature_serving, ) # Create feature_store.yaml out of the config From 14795198cc609b9ce03100cc0bda357837e4aae8 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Fri, 22 Jul 2022 15:04:45 -0700 Subject: [PATCH 55/73] chore: Update docs for offline and online stores (#2946) * Update docs Signed-off-by: Kevin Zhang * address review Signed-off-by: Kevin Zhang * Address review Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fux Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix 
Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang --- .../adding-a-new-offline-store.md | 226 +++++++++++++++--- .../adding-support-for-a-new-online-store.md | 154 +++++++++++- 2 files changed, 331 insertions(+), 49 deletions(-) diff --git a/docs/how-to-guides/adding-a-new-offline-store.md b/docs/how-to-guides/adding-a-new-offline-store.md index 8eeac7bcf4..c548538fce 100644 --- a/docs/how-to-guides/adding-a-new-offline-store.md +++ b/docs/how-to-guides/adding-a-new-offline-store.md @@ -2,13 +2,13 @@ ## Overview -Feast makes adding support for a new offline store (database) easy. Developers can simply implement the [OfflineStore](../../sdk/python/feast/infra/offline\_stores/offline\_store.py#L41) interface to add support for a new store (other than the existing stores like Parquet files, Redshift, and Bigquery). +Feast makes adding support for a new offline store easy. Developers can simply implement the [OfflineStore](../../sdk/python/feast/infra/offline\_stores/offline\_store.py#L41) interface to add support for a new store (other than the existing stores like Parquet files, Redshift, and Bigquery). In this guide, we will show you how to extend the existing File offline store and use in a feature repo. 
While we will be implementing a specific store, this guide should be representative for adding support for any new offline store. The full working code for this guide can be found at [feast-dev/feast-custom-offline-store-demo](https://github.com/feast-dev/feast-custom-offline-store-demo). -The process for using a custom offline store consists of 4 steps: +The process for using a custom offline store consists of 8 steps: 1. Defining an `OfflineStore` class. 2. Defining an `OfflineStoreConfig` class. @@ -16,6 +16,8 @@ The process for using a custom offline store consists of 4 steps: 4. Defining a `DataSource` class for the offline store 5. Referencing the `OfflineStore` in a feature repo's `feature_store.yaml` file. 6. Testing the `OfflineStore` class. +7. Updating dependencies. +8. Adding documentation. ## 1. Defining an OfflineStore class @@ -23,16 +25,37 @@ The process for using a custom offline store consists of 4 steps: OfflineStore class names must end with the OfflineStore suffix! {% endhint %} +### Contrib offline stores + +New offline stores go in `sdk/python/feast/infra/offline_stores/contrib/`. + +#### What is a contrib plugin? + +- Not guaranteed to implement all interface methods +- Not guaranteed to be stable. +- Should have warnings for users to indicate this is a contrib plugin that is not maintained by the maintainers. + +#### How do I make a contrib plugin an "official" plugin? +To move an offline store plugin out of contrib, you need: +- GitHub actions (i.e `make test-python-integration`) is setup to run all tests against the offline store and pass. +- At least two contributors own the plugin (ideally tracked in our `OWNERS` / `CODEOWNERS` file). + +#### Define the offline store class The OfflineStore class contains a couple of methods to read features from the offline store. Unlike the OnlineStore class, Feast does not manage any infrastructure for the offline store. 
-There are two methods that deal with reading data from the offline stores`get_historical_features`and `pull_latest_from_table_or_query`. +To fully implement the interface for the offline store, you will need to implement these methods: * `pull_latest_from_table_or_query` is invoked when running materialization (using the `feast materialize` or `feast materialize-incremental` commands, or the corresponding `FeatureStore.materialize()` method. This method pull data from the offline store, and the `FeatureStore` class takes care of writing this data into the online store. * `get_historical_features` is invoked when reading values from the offline store using the `FeatureStore.get_historical_features()` method. Typically, this method is used to retrieve features when training ML models. -* `pull_all_from_table_or_query` is a method that pulls all the data from an offline store from a specified start date to a specified end date. +* (optional) `offline_write_batch` is a method that supports directly pushing a pyarrow table to a feature view. Given a feature view with a specific schema, this function should write the pyarrow table to the batch source defined. More details about the push api can be found [here](docs/reference/data-sources/push.md). This method only needs implementation if you want to support the push api in your offline store. +* (optional) `pull_all_from_table_or_query` is a method that pulls all the data from an offline store from a specified start date to a specified end date. This method is only used for **SavedDatasets** as part of data quality monitoring validation. +* (optional) `write_logged_features` is a method that takes a pyarrow table or a path that points to a parquet file and writes the data to a defined source defined by `LoggingSource` and `LoggingConfig`. This method is only used internally for **SavedDatasets**. {% code title="feast_custom_offline_store/file.py" %} ```python + # Only prints out runtime warnings once. 
+ warnings.simplefilter("once", RuntimeWarning) + def get_historical_features(self, config: RepoConfig, feature_views: List[FeatureView], @@ -40,14 +63,15 @@ There are two methods that deal with reading data from the offline stores`get_hi entity_df: Union[pd.DataFrame, str], registry: Registry, project: str, full_feature_names: bool = False) -> RetrievalJob: - print("Getting historical features from my offline store") - return super().get_historical_features(config, - feature_views, - feature_refs, - entity_df, - registry, - project, - full_feature_names) + """ Perform point-in-time correct join of features onto an entity dataframe(entity key and timestamp). More details about how this should work at https://docs.feast.dev/v/v0.6-branch/user-guide/feature-retrieval#3.-historical-feature-retrieval. + print("Getting historical features from my offline store").""" + warnings.warn( + "This offline store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) + # Implementation here. + pass def pull_latest_from_table_or_query(self, config: RepoConfig, @@ -58,18 +82,78 @@ There are two methods that deal with reading data from the offline stores`get_hi created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime) -> RetrievalJob: + """ Pulls data from the offline store for use in materialization.""" print("Pulling latest features from my offline store") - return super().pull_latest_from_table_or_query(config, - data_source, - join_key_columns, - feature_name_columns, - timestamp_field=timestamp_field, - created_timestamp_column, - start_date, - end_date) + warnings.warn( + "This offline store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) + # Implementation here. 
+ pass + + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + """ Optional method that returns a Retrieval Job for all join key columns, feature name columns, and the event timestamp columns that occur between the start_date and end_date.""" + warnings.warn( + "This offline store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) + # Implementation here. + pass + + def write_logged_features( + config: RepoConfig, + data: Union[pyarrow.Table, Path], + source: LoggingSource, + logging_config: LoggingConfig, + registry: BaseRegistry, + ): + """ Optional method to have Feast support logging your online features.""" + warnings.warn( + "This offline store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) + # Implementation here. + pass + + def offline_write_batch( + config: RepoConfig, + feature_view: FeatureView, + table: pyarrow.Table, + progress: Optional[Callable[[int], Any]], + ): + """ Optional method to have Feast support the offline push api for your offline store.""" + warnings.warn( + "This offline store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) + # Implementation here. + pass + ``` {% endcode %} +### 1.1 Type Mapping + +Most offline stores will have to perform some custom mapping of offline store datatypes to feast value types. +- The function to implement here are `source_datatype_to_feast_value_type` and `get_column_names_and_types` in your `DataSource` class. 
+* `source_datatype_to_feast_value_type` is used to convert your DataSource's datatypes to feast value types. +* `get_column_names_and_types` retrieves the column names and corresponding datasource types. + +Add any helper functions for type conversion to `sdk/python/feast/type_map.py`. +- Be sure to implement correct type mapping so that Feast can process your feature columns without casting incorrectly that can potentially cause loss of information or incorrect data. + ## 2. Defining an OfflineStoreConfig class Additional configuration may be needed to allow the OfflineStore to talk to the backing store. For example, Redshift needs configuration information like the connection information for the Redshift instance, credentials for connecting to the database, etc. @@ -91,6 +175,8 @@ class CustomFileOfflineStoreConfig(FeastConfigBaseModel): type: Literal["feast_custom_offline_store.file.CustomFileOfflineStore"] \ = "feast_custom_offline_store.file.CustomFileOfflineStore" + + uri: str # URI for your offline store(in this case it would be a path) ``` {% endcode %} @@ -98,11 +184,18 @@ This configuration can be specified in the `feature_store.yaml` as follows: {% code title="feature_repo/feature_store.yaml" %} ```yaml -type: feast_custom_offline_store.file.CustomFileOfflineStore +project: my_project +registry: data/registry.db +provider: local +offline_store: + type: feast_custom_offline_store.file.CustomFileOfflineStore + uri: +online_store: + path: data/online_store.db ``` {% endcode %} -This configuration information is available to the methods of the OfflineStore, via the`config: RepoConfig` parameter which is passed into the methods of the OfflineStore interface, specifically at the `config.offline_store` field of the `config` parameter. 
+This configuration information is available to the methods of the OfflineStore, via the `config: RepoConfig` parameter which is passed into the methods of the OfflineStore interface, specifically at the `config.offline_store` field of the `config` parameter. This fields in the `feature_store.yaml` should map directly to your `OfflineStoreConfig` class that is detailed above in Section 2. {% code title="feast_custom_offline_store/file.py" %} ```python @@ -113,7 +206,11 @@ This configuration information is available to the methods of the OfflineStore, entity_df: Union[pd.DataFrame, str], registry: Registry, project: str, full_feature_names: bool = False) -> RetrievalJob: - + warnings.warn( + "This offline store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) offline_store_config = config.offline_store assert isinstance(offline_store_config, CustomFileOfflineStoreConfig) store_type = offline_store_config.type @@ -128,6 +225,8 @@ Custom offline stores may need to implement their own instances of the `Retrieva The `RetrievalJob` interface exposes two methods - `to_df` and `to_arrow`. The expectation is for the retrieval job to be able to return the rows read from the offline store as a parquet DataFrame, or as an Arrow table respectively. +Users who want to have their offline store support **scalable batch materialization** for online use cases (detailed in this [RFC](https://docs.google.com/document/d/1J7XdwwgQ9dY_uoV9zkRVGQjK9Sy43WISEW6D5V9qzGo/edit#heading=h.9gaqqtox9jg6)) will also need to implement `to_remote_storage` to distribute the reading and writing of offline store records to blob storage (such as S3). This may be used by a custom [Materialization Engine](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/materialization/batch_materialization_engine.py#L72) to parallelize the materialization of data by processing it in chunks. 
If this is not implemented, Feast will default to local materialization (pulling all records into memory to materialize). + {% code title="feast_custom_offline_store/file.py" %} ```python class CustomFileRetrievalJob(RetrievalJob): @@ -148,6 +247,10 @@ class CustomFileRetrievalJob(RetrievalJob): print("Getting a pandas DataFrame from a File is easy!") df = self.evaluation_function() return pyarrow.Table.from_pandas(df) + + def to_remote_storage(self): + # Optional method to write to an offline storage location to support scalable batch materialization. + pass ``` {% endcode %} @@ -171,6 +274,9 @@ class CustomFileDataSource(FileSource): created_timestamp_column: Optional[str] = "", date_partition_column: Optional[str] = "", ): + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) super(CustomFileDataSource, self).__init__( timestamp_field=timestamp_field, created_timestamp_column, @@ -225,11 +331,12 @@ project: test_custom registry: data/registry.db provider: local offline_store: + # Make sure to specify the type as the fully qualified path that Feast can import. type: feast_custom_offline_store.file.CustomFileOfflineStore ``` {% endcode %} -If additional configuration for the offline store is **not **required, then we can omit the other fields and only specify the `type` of the offline store class as the value for the `offline_store`. +If additional configuration for the offline store is **not** required, then we can omit the other fields and only specify the `type` of the offline store class as the value for the `offline_store`. 
{% code title="feature_repo/feature_store.yaml" %} ```yaml @@ -244,7 +351,7 @@ Finally, the custom data source class can be use in the feature repo to define a {% code title="feature_repo/repo.py" %} ```python -pdriver_hourly_stats = CustomFileDataSource( +driver_hourly_stats = CustomFileDataSource( path="feature_repo/data/driver_stats.parquet", timestamp_field="event_timestamp", created_timestamp_column="created", @@ -260,23 +367,70 @@ driver_hourly_stats_view = FeatureView( ## 6. Testing the OfflineStore class -Even if you have created the `OfflineStore` class in a separate repo, you can still test your implementation against the Feast test suite, as long as you have Feast as a submodule in your repo. In the Feast submodule, we can run all the unit tests with: +### Integrating with the integration test suite and unit test suite. -``` -make test -``` +Even if you have created the `OfflineStore` class in a separate repo, you can still test your implementation against the Feast test suite, as long as you have Feast as a submodule in your repo. + +1. In order to test against the test suite, you need to create a custom `DataSourceCreator` that implement our testing infrastructure methods, `create_data_source` and optionally, `created_saved_dataset_destination`. + * `create_data_source` should create a datasource based on the dataframe passed in. It may be implemented by uploading the contents of the dataframe into the offline store and returning a datasource object pointing to that location. See `BigQueryDataSourceCreator` for an implementation of a data source creator. + * `created_saved_dataset_destination` is invoked when users need to save the dataset for use in data validation. This functionality is still in alpha and is **optional**. + +2. Make sure that your offline store doesn't break any unit tests first by running: + ``` + make test-python + ``` + +3. Next, set up your offline store to run the universal integration tests. 
These are integration tests specifically intended to test offline and online stores against Feast API functionality, to ensure that the Feast APIs works with your offline store. + - Feast parametrizes integration tests using the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py` which stores different offline store classes for testing. + - To overwrite the default configurations to use your own offline store, you can simply create your own file that contains a `FULL_REPO_CONFIGS` dictionary, and point Feast to that file by setting the environment variable `FULL_REPO_CONFIGS_MODULE` to point to that file. The module should add new `IntegrationTestRepoConfig` classes to the `AVAILABLE_OFFLINE_STORES` by defining an offline store that you would like Feast to test with. + + A sample `FULL_REPO_CONFIGS_MODULE` looks something like this: + + ```python + # Should go in sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py + from feast.infra.offline_stores.contrib.postgres_offline_store.tests.data_source import ( + PostgreSQLDataSourceCreator, + ) -The universal tests, which are integration tests specifically intended to test offline and online stores, can be run with: + AVAILABLE_OFFLINE_STORES = [("local", PostgreSQLDataSourceCreator)] + ``` + +4. You should swap out the `FULL_REPO_CONFIGS` environment variable and run the integration tests against your offline store. In the example repo, the file that overwrites `FULL_REPO_CONFIGS` is `feast_custom_offline_store/feast_tests.py`, so you would run: + + ```bash + export FULL_REPO_CONFIGS_MODULE='feast_custom_offline_store.feast_tests' + make test-python-universal + ``` + + If the integration tests fail, this indicates that there is a mistake in the implementation of this offline store! + +5. 
Remember to add your datasource to `repo_config.py` similar to how we added `spark`, `trino`, etc, to the dictionary `OFFLINE_STORE_CLASS_FOR_TYPE` and add the necessary configuration to `repo_configuration.py`. Namely, `AVAILABLE_OFFLINE_STORES` should load your repo configuration module. + +### 7. Dependencies + +Add any dependencies for your offline store to our `sdk/python/setup.py` under a new `__REQUIRED` list with the packages and add it to the setup script so that if your offline store is needed, users can install the necessary python packages. These packages should be defined as extras so that they are not installed by users by default. +You will need to regenerate our requirements files. To do this, create separate pyenv environments for python 3.8, 3.9, and 3.10. In each environment, run the following commands: ``` -make test-python-universal +export PYTHON= +make lock-python-ci-dependencies ``` -The unit tests should succeed, but the universal tests will likely fail. The tests are parametrized based on the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py`. To overwrite these configurations, you can simply create your own file that contains a `FULL_REPO_CONFIGS`, and point Feast to that file by setting the environment variable `FULL_REPO_CONFIGS_MODULE` to point to that file. The main challenge there will be to write a `DataSourceCreator` for the offline store. In this repo, the file that overwrites `FULL_REPO_CONFIGS` is `feast_custom_offline_store/feast_tests.py`, so you would run -``` -export FULL_REPO_CONFIGS_MODULE='feast_custom_offline_store.feast_tests' -make test-python-universal +### 8. Add Documentation + +Remember to add documentation for your offline store. + +1. Add a new markdown file to `docs/reference/offline-stores/` and `docs/reference/data-sources/`. Use these files to document your offline store functionality similar to how the other offline stores are documented. +2. 
You should also add a reference in `docs/reference/data-sources/README.md` and `docs/SUMMARY.md` to these markdown files. + +**NOTE**: Be sure to document the following things about your offline store: +- How to create the datasource and most what configuration is needed in the `feature_store.yaml` file in order to create the datasource. +- Make sure to flag that the datasource is in alpha development. +- Add some documentation on what the data model is for the specific offline store for more clarity. +- Finally, generate the python code docs by running: + +```bash +make build-sphinx ``` -to test the offline store against the Feast universal tests. You should notice that some of the tests actually fail; this indicates that there is a mistake in the implementation of this offline store! diff --git a/docs/how-to-guides/adding-support-for-a-new-online-store.md b/docs/how-to-guides/adding-support-for-a-new-online-store.md index fee47945bf..d1f5986f18 100644 --- a/docs/how-to-guides/adding-support-for-a-new-online-store.md +++ b/docs/how-to-guides/adding-support-for-a-new-online-store.md @@ -8,12 +8,15 @@ In this guide, we will show you how to integrate with MySQL as an online store. The full working code for this guide can be found at [feast-dev/feast-custom-online-store-demo](https://github.com/feast-dev/feast-custom-online-store-demo). -The process of using a custom online store consists of 3 steps: + +The process of using a custom online store consists of 6 steps: 1. Defining the `OnlineStore` class. 2. Defining the `OnlineStoreConfig` class. 3. Referencing the `OnlineStore` in a feature repo's `feature_store.yaml` file. 4. Testing the `OnlineStore` class. +5. Update dependencies. +6. Add documentation. ## 1. Defining an OnlineStore class @@ -21,6 +24,21 @@ The process of using a custom online store consists of 3 steps: OnlineStore class names must end with the OnlineStore suffix! 
{% endhint %} +### Contrib online stores + +New online stores go in `sdk/python/feast/infra/online_stores/contrib/`. + +#### What is a contrib plugin? + +- Not guaranteed to implement all interface methods +- Not guaranteed to be stable. +- Should have warnings for users to indicate this is a contrib plugin that is not maintained by the maintainers. + +#### How do I make a contrib plugin an "official" plugin? +To move an online store plugin out of contrib, you need: +- GitHub actions (i.e `make test-python-integration`) is setup to run all tests against the online store and pass. +- At least two contributors own the plugin (ideally tracked in our `OWNERS` / `CODEOWNERS` file). + The OnlineStore class broadly contains two sets of methods * One set deals with managing infrastructure that the online store needed for operations @@ -40,6 +58,9 @@ The `teardown` method should be used to perform any clean-up operations. `teardo {% code title="feast_custom_online_store/mysql.py" %} ```python +# Only prints out runtime warnings once. +warnings.simplefilter("once", RuntimeWarning) + def update( self, config: RepoConfig, @@ -50,8 +71,13 @@ def update( partial: bool, ): """ - An example of creating manging the tables needed for a mysql-backed online store. + An example of creating managing the tables needed for a mysql-backed online store. """ + warnings.warn( + "This online store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) conn = self._get_conn(config) cur = conn.cursor(buffered=True) @@ -78,9 +104,11 @@ def teardown( tables: Sequence[Union[FeatureTable, FeatureView]], entities: Sequence[Entity], ): - """ - - """ + warnings.warn( + "This online store is an experimental feature in alpha development. 
" + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) conn = self._get_conn(config) cur = conn.cursor(buffered=True) project = config.project @@ -102,6 +130,9 @@ There are two methods that deal with writing data to and from the online stores. {% code title="feast_custom_online_store/mysql.py" %} ```python +# Only prints out runtime warnings once. +warnings.simplefilter("once", RuntimeWarning) + def online_write_batch( self, config: RepoConfig, @@ -111,6 +142,11 @@ def online_write_batch( ], progress: Optional[Callable[[int], Any]], ) -> None: + warnings.warn( + "This online store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) conn = self._get_conn(config) cur = conn.cursor(buffered=True) @@ -135,6 +171,11 @@ def online_read( entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + warnings.warn( + "This online store is an experimental feature in alpha development. " + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) conn = self._get_conn(config) cur = conn.cursor(buffered=True) @@ -166,6 +207,16 @@ def online_read( ``` {% endcode %} +### 1.3 Type Mapping + +Most online stores will have to perform some custom mapping of online store datatypes to feast value types. +- The function to implement here are `source_datatype_to_feast_value_type` and `get_column_names_and_types` in your `DataSource` class. +* `source_datatype_to_feast_value_type` is used to convert your DataSource's datatypes to feast value types. +* `get_column_names_and_types` retrieves the column names and corresponding datasource types. + +Add any helper functions for type conversion to `sdk/python/feast/type_map.py`. 
+- Be sure to implement correct type mapping so that Feast can process your feature columns without casting incorrectly that can potentially cause loss of information or incorrect data. + ## 2. Defining an OnlineStoreConfig class Additional configuration may be needed to allow the OnlineStore to talk to the backing store. For example, MySQL may need configuration information like the host at which the MySQL instance is running, credentials for connecting to the database, etc. @@ -243,7 +294,8 @@ To use our MySQL online store, we can use the following `feature_store.yaml`: project: test_custom registry: data/registry.db provider: local -online_store: +online_store: + # Make sure to specify the type as the fully qualified path that Feast can import. type: feast_custom_online_store.mysql.MySQLOnlineStore user: foo password: bar @@ -263,23 +315,99 @@ online_store: feast_custom_online_store.mysql.MySQLOnlineStore ## 4. Testing the OnlineStore class -Even if you have created the `OnlineStore` class in a separate repo, you can still test your implementation against the Feast test suite, as long as you have Feast as a submodule in your repo. In the Feast submodule, we can run all the unit tests with: +### Integrating with the integration test suite and unit test suite. + +Even if you have created the `OnlineStore` class in a separate repo, you can still test your implementation against the Feast test suite, as long as you have Feast as a submodule in your repo. + +1. In the Feast submodule, we can run all the unit tests and make sure they pass: + ``` + make test-python + ``` + +2. The universal tests, which are integration tests specifically intended to test offline and online stores, should be run against Feast to ensure that the Feast APIs works with your online store. 
+ - Feast parametrizes integration tests using the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py` which stores different online store classes for testing. + - To overwrite these configurations, you can simply create your own file that contains a `FULL_REPO_CONFIGS` variable, and point Feast to that file by setting the environment variable `FULL_REPO_CONFIGS_MODULE` to point to that file. + +A sample `FULL_REPO_CONFIGS_MODULE` looks something like this: + +{% code title="sdk/python/feast/infra/online_stores/contrib/postgres_repo_configuration.py" %} +```python +from feast.infra.offline_stores.contrib.postgres_offline_store.tests.data_source import ( + PostgreSQLDataSourceCreator, +) + +AVAILABLE_ONLINE_STORES = {"postgres": (None, PostgreSQLDataSourceCreator)} ``` -make test +{% endcode %} + + +If you are planning to start the online store up locally (e.g. spin up a local Redis instance) for testing, then the dictionary entry should be something like: + + +```python +{ + "sqlite": ({"type": "sqlite"}, None), + # Specifies sqlite as the online store. The `None` object specifies to not use a containerized docker container. +} ``` -The universal tests, which are integration tests specifically intended to test offline and online stores, can be run with: +If you are planning instead to use a Dockerized container to run your tests against your online store, you can define an `OnlineStoreCreator` and replace the `None` object above with your `OnlineStoreCreator` class. + + +If you create a containerized docker image for testing, developers who are trying to test with your online store will not have to spin up their own instance of the online store for testing. 
An example of an `OnlineStoreCreator` is shown below: + +{% code title="sdk/python/tests/integration/feature_repos/universal/online_store/redis.py" %} +```python +class RedisOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, project_name: str, **kwargs): + super().__init__(project_name) + + def create_online_store(self) -> Dict[str, str]: + self.container.start() + log_string_to_wait_for = "Ready to accept connections" + wait_for_logs( + container=self.container, predicate=log_string_to_wait_for, timeout=10 + ) + self.container.stop() ``` +{% endcode %} + +3\. You should swap out the `FULL_REPO_CONFIGS` environment variable and run the integration tests against your online store. In the example repo, the file that overwrites `FULL_REPO_CONFIGS` is `feast_custom_online_store/feast_tests.py`, so you would run: + +```bash +export FULL_REPO_CONFIGS_MODULE='feast_custom_online_store.feast_tests' make test-python-universal ``` -The unit tests should succeed, but the universal tests will likely fail. The tests are parametrized based on the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py`. To overwrite these configurations, you can simply create your own file that contains a `FULL_REPO_CONFIGS`, and point Feast to that file by setting the environment variable `FULL_REPO_CONFIGS_MODULE` to point to that file. In this repo, the file that overwrites `FULL_REPO_CONFIGS` is `feast_custom_online_store/feast_tests.py`, so you would run +- If there are some tests that fail, this indicates that there is a mistake in the implementation of this online store! + + +### 5. Add Dependencies + +Add any dependencies for your online store to our `sdk/python/setup.py` under a new `_REQUIRED` list with the packages and add it to the setup script so that if your online store is needed, users can install the necessary python packages. These packages should be defined as extras so that they are not installed by users by default. 
+- You will need to regenerate our requirements files. To do this, create separate pyenv environments for python 3.8, 3.9, and 3.10. In each environment, run the following commands: ``` -export FULL_REPO_CONFIGS_MODULE='feast_custom_online_store.feast_tests' -make test-python-universal +export PYTHON= +make lock-python-ci-dependencies +``` + + +### 6. Add Documentation + +Remember to add the documentation for your online store. +1. Add a new markdown file to `docs/reference/online-stores/`. +2. You should also add a reference in `docs/reference/online-stores/README.md` and `docs/SUMMARY.md`. Add a new markdown document to document your online store functionality similar to how the other online stores are documented. + +**NOTE**: Be sure to document the following things about your online store: +- Be sure to cover how to create the datasource and what configuration is needed in the `feature_store.yaml` file in order to create the datasource. +- Make sure to flag that the online store is in alpha development. +- Add some documentation on what the data model is for the specific online store for more clarity. +- Finally, generate the python code docs by running: + +```bash +make build-sphinx ``` 
From a15fcb463d730659576877d86dae0be6a6a5a98b Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Fri, 22 Jul 2022 15:53:45 -0700 Subject: [PATCH 56/73] docs: Fix docs for Go feature retrieval (#2967) * Docs Signed-off-by: Felix Wang * Update production docs Signed-off-by: Felix Wang * Update page name Signed-off-by: Felix Wang * Update roadmap Signed-off-by: Felix Wang --- docs/SUMMARY.md | 3 +- .../fetching-java-features-k8s.md | 15 --- .../running-feast-in-production.md | 24 ++--- .../feature-servers/go-feature-retrieval.md | 85 ----------------- .../feature-servers/go-feature-server.md | 93 +++++++++++++++++++ .../feature-servers/python-feature-server.md | 29 +++--- docs/roadmap.md | 12 +-- 7 files changed, 124 insertions(+), 137 deletions(-) delete mode 100644 docs/how-to-guides/fetching-java-features-k8s.md delete mode 100644 docs/reference/feature-servers/go-feature-retrieval.md create mode 100644 docs/reference/feature-servers/go-feature-server.md diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 6a216a332f..cf10a528bd 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -51,7 +51,6 @@ * [Load data into the online store](how-to-guides/feast-snowflake-gcp-aws/load-data-into-the-online-store.md) * [Read features from the online store](how-to-guides/feast-snowflake-gcp-aws/read-features-from-the-online-store.md) * [Running Feast in production](how-to-guides/running-feast-in-production.md) -* [Deploying a Java feature server on Kubernetes](how-to-guides/fetching-java-features-k8s.md) * [Upgrading from Feast 0.9](https://docs.google.com/document/u/1/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0/edit) * [Adding a custom provider](how-to-guides/creating-a-custom-provider.md) * [Adding a custom batch materialization engine](how-to-guides/creating-a-custom-materialization-engine.md) @@ -93,7 +92,7 @@ * [.feastignore](reference/feature-repository/feast-ignore.md) * [Feature servers](reference/feature-servers/README.md) * [Python feature 
server](reference/feature-servers/python-feature-server.md) - * [Go-based feature retrieval](reference/feature-servers/go-feature-retrieval.md) + * [Go feature server](reference/feature-servers/go-feature-server.md) * [\[Alpha\] Web UI](reference/alpha-web-ui.md) * [\[Alpha\] Data quality monitoring](reference/dqm.md) * [\[Alpha\] On demand feature view](reference/alpha-on-demand-feature-view.md) diff --git a/docs/how-to-guides/fetching-java-features-k8s.md b/docs/how-to-guides/fetching-java-features-k8s.md deleted file mode 100644 index 1aa6abd52b..0000000000 --- a/docs/how-to-guides/fetching-java-features-k8s.md +++ /dev/null @@ -1,15 +0,0 @@ -# How to set up a Java feature server - -This tutorial guides you on how to: - -* Define features and data sources in Feast using the Feast CLI -* Materialize features to a Redis cluster deployed on Kubernetes. -* Deploy a Feast Java feature server into a Kubernetes cluster using the Feast helm charts -* Retrieve features using the gRPC API exposed by the Feast Java server - -Try it and let us know what you think! - -| ![](../.gitbook/assets/github-mark-32px.png)[ View guide in Github](../../examples/java-demo/README.md) | -|:--------------------------------------------------------------------------------------------------------| - - diff --git a/docs/how-to-guides/running-feast-in-production.md b/docs/how-to-guides/running-feast-in-production.md index 6023c5ac66..eba3507f7d 100644 --- a/docs/how-to-guides/running-feast-in-production.md +++ b/docs/how-to-guides/running-feast-in-production.md @@ -242,14 +242,12 @@ This service will provide an HTTP API with JSON I/O, which can be easily used wi [Read more about this feature](../reference/alpha-aws-lambda-feature-server.md) -### 4.3. Java based Feature Server deployed on Kubernetes +### 4.3. Go feature server deployed on Kubernetes -For users with very latency-sensitive and high QPS use-cases, Feast offers a high-performance Java feature server. 
-Besides the benefits of running on JVM, this implementation also provides a gRPC API, which guarantees good connection utilization and -small request / response body size (compared to JSON). -You will need the Feast Java SDK to retrieve features from this service. This SDK wraps all the gRPC logic for you and provides more convenient APIs. +For users with very latency-sensitive and high QPS use-cases, Feast offers a high-performance [Go feature server](../reference/feature-servers/go-feature-server.md). +It can use either HTTP or gRPC. -The Java based feature server can be deployed to Kubernetes cluster via Helm charts in a few simple steps: +The Go feature server can be deployed to a Kubernetes cluster via Helm charts in a few simple steps: 1. Install [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) and [helm 3](https://helm.sh/) 2. Add the Feast Helm repository and download the latest charts: @@ -259,17 +257,19 @@ helm repo update ``` 3. Run Helm Install ``` -helm install feast-release feast-charts/feast \ +helm install feast-release feast-charts/feast-python-server \ --set global.registry.path=s3://feast/registries/prod \ --set global.project= ``` -This chart will deploy two services: `feature-server` and `transformation-service`. -Both must have read access to the registry file on cloud storage. Both will keep a copy of the registry in their memory and periodically refresh it, so expect some delays in update propagation in exchange for better performance. +This chart will deploy a single service. +The service must have read access to the registry file on cloud storage. +It will keep a copy of the registry in their memory and periodically refresh it, so expect some delays in update propagation in exchange for better performance. +In order for the Go feature server to be enabled, you should set `go_feature_serving: True` in the `feature_store.yaml`. 
#### Load balancing -The next step would be to install an L7 Load Balancer (eg, [Envoy](https://www.envoyproxy.io/)) in front of the Java feature server. +The next step would be to install an L7 Load Balancer (eg, [Envoy](https://www.envoyproxy.io/)) in front of the Go feature server. For seamless integration with Kubernetes (including services created by Feast Helm chart) we recommend using [Istio](https://istio.io/) as Envoy's orchestrator. ## 5. Ingesting features from a stream source @@ -344,8 +344,8 @@ Summarizing it all together we want to show several options of architecture that * Feast SDK is being triggered by CI (eg, Github Actions). It applies the latest changes from the feature repo to the Feast registry * Airflow manages materialization jobs to ingest data from DWH to the online store periodically * For the stream ingestion Feast Python SDK is used in the existing Spark / Beam pipeline -* Online features are served via either a Python feature server or a high performance Java feature server - * Both the Java feature server and the transformation server are deployed on Kubernetes cluster (via Helm charts) +* Online features are served via either a Python feature server or a high performance Go feature server + * The Go feature server can be deployed on a Kubernetes cluster (via Helm charts) * Feast Python SDK is called locally to generate a training dataset ![From Repository to Production: Feast Production Architecture](production-spark.png) diff --git a/docs/reference/feature-servers/go-feature-retrieval.md b/docs/reference/feature-servers/go-feature-retrieval.md deleted file mode 100644 index 92a9ca2ebe..0000000000 --- a/docs/reference/feature-servers/go-feature-retrieval.md +++ /dev/null @@ -1,85 +0,0 @@ -# Go-based Feature Retrieval - -## Overview - -The Go Feature Retrieval component is a Go implementation of the core feature serving logic, embedded in the Python SDK. 
It supports retrieval of feature references, feature services, and on demand feature views, and can be used either through the Python SDK or the [Python feature server](python-feature-server.md). - -Currently, this component only supports online serving and does not have an offline component including APIs to create feast feature repositories or apply configuration to the registry to facilitate online materialization. It also does not expose its own dedicated cli to perform feast actions. Furthermore, this component is only meant to expose an online serving API that can be called through the python SDK to facilitate faster online feature retrieval. - -The Go Feature Retrieval component currently only supports Redis and Sqlite as online stores; support for other online stores will be added soon. Initial benchmarks indicate that it is significantly faster than the Python feature server for online feature retrieval. We plan to release a more comprehensive set of benchmarks. For more details, see the [RFC](https://docs.google.com/document/d/1Lgqv6eWYFJgQ7LA_jNeTh8NzOPhqI9kGTeyESRpNHnE). - -## Installation - -As long as you are running macOS or linux, on x86, with python version 3.7-3.10, the go component comes pre-compiled when you install feast. - -However, some additional dependencies are required for Go <-> Python interoperability. To install these dependencies run the following command in your console: -``` -pip install feast[go] -``` -You will also have to install the apache-arrow c++ libraries, since we use the cgo memory allocator to prevent memory from being incorrectly garbage collected, detailed in these [docs](https://pkg.go.dev/github.com/apache/arrow/go/arrow@v0.0.0-20211112161151-bc219186db40/cdata#ExportArrowRecordBatch). - -For macos, run `brew install apache-arrow`. -For linux users, you have to install `libarrow-dev`. 
-``` -sudo apt update -sudo apt install -y -V ca-certificates lsb-release wget -wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb -sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb -sudo apt update -sudo apt install -y -V libarrow-dev # For C++ -``` - -For developers, if you want to build from source, run `make compile-go-lib` to build and compile the go server. In order to build the go binaries, you will need to install the `apache-arrow` c++ libraries. - -## Usage - -To enable the Go online feature retrieval component, set `go_feature_retrieval: True` in your `feature_store.yaml`. This will direct all online feature retrieval to Go instead of Python. This flag will be enabled by default in the future. - -{% code title="feature_store.yaml" %} -```yaml -project: my_feature_repo -registry: data/registry.db -provider: local -online_store: - type: redis - connection_string: "localhost:6379" -go_feature_retrieval: True -``` -{% endcode %} - -## Feature logging - -Go feature server can log all requested entities and served features to a configured destination inside an offline store. -This allows users to create new datasets from features served online. Those datasets could be used for future trainings or for -feature validations. 
To enable feature logging we need to edit `feature_store.yaml`: -```yaml -project: my_feature_repo -registry: data/registry.db -provider: local -online_store: - type: redis - connection_string: "localhost:6379" -go_feature_retrieval: True -feature_server: - feature_logging: - enable: True -``` - -Feature logging configuration in `feature_store.yaml` also allows to tweak some low-level parameters to achieve the best performance: -```yaml -feature_server: - feature_logging: - enable: True - flush_interval_secs: 300 - write_to_disk_interval_secs: 30 - emit_timeout_micro_secs: 10000 - queue_capacity: 10000 -``` -All these parameters are optional. - -## Future/Current Work - -The Go feature retrieval online feature logging for Data Quality Monitoring is currently in development. More information can be found [here](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit#heading=h.9gaqqtox9jg6). - -We also plan on adding support for the Java feature server (e.g. the capability to call into the Go component and execute Java UDFs). - diff --git a/docs/reference/feature-servers/go-feature-server.md b/docs/reference/feature-servers/go-feature-server.md new file mode 100644 index 0000000000..f83b765c3a --- /dev/null +++ b/docs/reference/feature-servers/go-feature-server.md @@ -0,0 +1,93 @@ +# Go feature server + +## Overview + +The Go feature server is an HTTP/gRPC endpoint that serves features. +It is written in Go, and is therefore significantly faster than the Python feature server. +See this [blog post](https://feast.dev/blog/go-feature-server-benchmarks/) for more details on the comparison between Python and Go. +In general, we recommend the Go feature server for all production use cases that require extremely low-latency feature serving. +Currently only the Redis and SQLite online stores are supported. + +## CLI + +By default, the Go feature server is turned off. 
+To turn it on you can add `go_feature_serving: True` to your `feature_store.yaml`: + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: redis + connection_string: "localhost:6379" +go_feature_serving: True +``` +{% endcode %} + +Then the `feast serve` CLI command will start the Go feature server. +As with Python, the Go feature server uses port 6566 by default; the port can be overridden with a `--port` flag. +Moreover, the server uses HTTP by default, but can be set to use gRPC with `--type=grpc`. + +Alternatively, if you wish to experiment with the Go feature server instead of permanently turning it on, you can just run `feast serve --go`. + +## Installation + +The Go component comes pre-compiled when you install Feast with Python versions 3.8-3.10 on macOS or Linux (on x86). +In order to install the additional Python dependencies, you should install Feast with +``` +pip install feast[go] +``` +You must also install the Apache Arrow C++ libraries. +This is because the Go feature server uses the cgo memory allocator from the Apache Arrow C++ library for interoperability between Go and Python, to prevent memory from being accidentally garbage collected when executing on-demand feature views. +You can read more about the usage of the cgo memory allocator in these [docs](https://pkg.go.dev/github.com/apache/arrow/go/arrow@v0.0.0-20211112161151-bc219186db40/cdata#ExportArrowRecordBatch). + +For macOS, run `brew install apache-arrow`. +For Linux users, you have to install `libarrow-dev`. 
+``` +sudo apt update +sudo apt install -y -V ca-certificates lsb-release wget +wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb +sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb +sudo apt update +sudo apt install -y -V libarrow-dev # For C++ +``` +For developers, if you want to build from source, run `make compile-go-lib` to build and compile the go server. In order to build the go binaries, you will need to install the `apache-arrow` c++ libraries. + +## Alpha features + +### Feature logging + +The Go feature server can log all requested entities and served features to a configured destination inside an offline store. +This allows users to create new datasets from features served online. Those datasets could be used for future trainings or for +feature validations. To enable feature logging we need to edit `feature_store.yaml`: +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: redis + connection_string: "localhost:6379" +go_feature_serving: True +feature_server: + feature_logging: + enable: True +``` + +Feature logging configuration in `feature_store.yaml` also allows to tweak some low-level parameters to achieve the best performance: +```yaml +feature_server: + feature_logging: + enable: True + flush_interval_secs: 300 + write_to_disk_interval_secs: 30 + emit_timeout_micro_secs: 10000 + queue_capacity: 10000 +``` +All these parameters are optional. + +### Python SDK retrieval + +The logic for the Go feature server can also be used to retrieve features during a Python `get_online_features` call. +To enable this behavior, you must add `go_feature_retrieval: True` to your `feature_store.yaml`. +You must also have all the dependencies installed as detailed above. 
diff --git a/docs/reference/feature-servers/python-feature-server.md b/docs/reference/feature-servers/python-feature-server.md index ecc12dd12d..2646c28ef4 100644 --- a/docs/reference/feature-servers/python-feature-server.md +++ b/docs/reference/feature-servers/python-feature-server.md @@ -2,23 +2,22 @@ ## Overview -The feature server is an HTTP endpoint that serves features with JSON I/O. This enables users to write + read features from Feast online stores using any programming language that can make HTTP requests. +The Python feature server is an HTTP endpoint that serves features with JSON I/O. This enables users to write and read features from the online store using any programming language that can make HTTP requests. ## CLI -There is a CLI command that starts the server: `feast serve`. By default, Feast uses port 6566; the port be overridden by a `--port` flag. +There is a CLI command that starts the server: `feast serve`. By default, Feast uses port 6566; the port can be overridden with a `--port` flag. ## Deploying as a service -One can also deploy a feature server by building a docker image that bundles in the project's `feature_store.yaml`. See [helm chart](https://github.com/feast-dev/feast/blob/master/infra/charts/feast-python-server) for example. - -A [remote feature server](../alpha-aws-lambda-feature-server.md) on AWS Lambda is available. A remote feature server on GCP Cloud Run is currently being developed. +One can deploy a feature server by building a docker image that bundles in the project's `feature_store.yaml`. See this [helm chart](https://github.com/feast-dev/feast/blob/master/infra/charts/feast-python-server) for an example. +A [remote feature server](../alpha-aws-lambda-feature-server.md) on AWS Lambda is also available. 
## Example ### Initializing a feature server -Here's the local feature server usage example with the local template: +Here's an example of how to start the Python feature server with a local feature repo: ```bash $ feast init feature_repo @@ -27,9 +26,11 @@ Creating a new Feast repository in /home/tsotne/feast/feature_repo. $ cd feature_repo $ feast apply -Registered entity driver_id -Registered feature view driver_hourly_stats -Deploying infrastructure for driver_hourly_stats +Created entity driver +Created feature view driver_hourly_stats +Created feature service driver_activity + +Created sqlite table feature_repo_driver_hourly_stats $ feast materialize-incremental $(date +%Y-%m-%d) Materializing 1 feature views to 2021-09-09 17:00:00-07:00 into the sqlite online store. @@ -38,8 +39,6 @@ driver_hourly_stats from 2021-09-09 16:51:08-07:00 to 2021-09-09 17:00:00-07:00: 100%|████████████████████████████████████████████████████████████████| 5/5 [00:00<00:00, 295.24it/s] $ feast serve -This is an experimental feature. It's intended for early testing and feedback, and could change without warnings in future releases. -INFO: Started server process [8889] 09/10/2021 10:42:11 AM INFO:Started server process [8889] INFO: Waiting for application startup. 09/10/2021 10:42:11 AM INFO:Waiting for application startup. 
@@ -49,7 +48,7 @@ INFO: Uvicorn running on http://127.0.0.1:6566 (Press CTRL+C to quit) 09/10/2021 10:42:11 AM INFO:Uvicorn running on http://127.0.0.1:6566 (Press CTRL+C to quit) ``` -### Retrieving features from the online store +### Retrieving features After the server starts, we can execute cURL commands from another terminal tab: ```bash @@ -153,11 +152,9 @@ curl -X POST \ ``` ### Pushing features to the online and offline stores -You can push data corresponding to a push source to the online and offline stores (note that timestamps need to be strings): - -You can also define a pushmode to push stream or batch data, either to the online store, offline store, or both. The feature server will throw an error if the online/offline store doesn't support the push api functionality. +The Python feature server also exposes an endpoint for [push sources](../../data-sources/push.md). This endpoint allows you to push data to the online and/or offline store. -The request definition for pushmode is a string parameter `to` where the options are: ["online", "offline", "online_and_offline"]. +The request definition for pushmode is a string parameter `to` where the options are: ["online", "offline", "online_and_offline"]. Note that timestamps need to be strings. ```text curl -X POST "http://localhost:6566/push" -d '{ "push_source_name": "driver_hourly_stats_push_source", diff --git a/docs/roadmap.md b/docs/roadmap.md index ae46eb9005..efe2164b9c 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -1,10 +1,9 @@ # Roadmap -The list below contains the functionality that contributors are planning to develop for Feast +The list below contains the functionality that contributors are planning to develop for Feast. -* Items below that are in development (or planned for development) will be indicated in parentheses. * We welcome contribution to all items in the roadmap! -* Have questions about the roadmap? 
Go to the Slack channel to ask on #feast-development +* Have questions about the roadmap? Go to the Slack channel to ask on #feast-development. * **Data Sources** * [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake) @@ -50,9 +49,8 @@ The list below contains the functionality that contributors are planning to deve * [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) * **Feature Serving** * [x] Python Client - * [x] REST Feature Server (Python) (See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) - * [x] REST / gRPC Feature Server (Go) (Alpha release. See [docs](https://docs.feast.dev/reference/feature-servers/go-feature-retrieval) - * [x] gRPC Feature Server (Java) (Alpha release. See [#1497](https://github.com/feast-dev/feast/issues/1497)) + * [x] [Python feature server](https://docs.feast.dev/reference/feature-servers/python-feature-server) + * [x] [Go feature server](https://docs.feast.dev/reference/feature-servers/go-feature-server) * **Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** * [x] Data profiling and validation (Great Expectations) * **Feature Discovery and Governance** @@ -61,4 +59,4 @@ The list below contains the functionality that contributors are planning to deve * [x] Model-centric feature tracking (feature services) * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) - * [x] Feast Web UI (Alpha release. See [documentation](https://docs.feast.dev/reference/alpha-web-ui)) \ No newline at end of file + * [x] Feast Web UI (Alpha release. 
See [docs](https://docs.feast.dev/reference/alpha-web-ui)) \ No newline at end of file From ac55ce25388abfa35e93097bd14190eeba08a165 Mon Sep 17 00:00:00 2001 From: levpick Date: Mon, 25 Jul 2022 17:28:42 +0300 Subject: [PATCH 57/73] fix: Fixing Spark min / max entity df event timestamps range return order (#2735) fix: Fixing the return order of elements when calculating the min and max entity-DF event timestamps in the Spark offline store. Signed-off-by: Lev Pickovsky --- .../infra/offline_stores/contrib/spark_offline_store/spark.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index 8e0badd732..2437714dec 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -330,8 +330,8 @@ def _get_entity_df_event_timestamp_range( df = spark_session.sql(entity_df).select(entity_df_event_timestamp_col) # TODO(kzhang132): need utc conversion here. 
entity_df_event_timestamp_range = ( - df.agg({entity_df_event_timestamp_col: "max"}).collect()[0][0], df.agg({entity_df_event_timestamp_col: "min"}).collect()[0][0], + df.agg({entity_df_event_timestamp_col: "max"}).collect()[0][0], ) else: raise InvalidEntityType(type(entity_df)) From 5ae2a34fff09ee9faf354a2fa58516c80a8d864d Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Mon, 25 Jul 2022 16:00:42 -0700 Subject: [PATCH 58/73] chore: Update helm chart name (#2969) * Rename feast-python-server helm chart to feast-feature-server Signed-off-by: Felix Wang * Switch example to Python 3.8 Signed-off-by: Felix Wang * Update helm chart docs Signed-off-by: Felix Wang * Change name Signed-off-by: Felix Wang * Change references Signed-off-by: Felix Wang * Change another reference Signed-off-by: Felix Wang * Add arrow C++ library to example Dockerfile Signed-off-by: Felix Wang --- .../running-feast-in-production.md | 7 +- infra/charts/feast-feature-server/.helmignore | 23 ++++++ infra/charts/feast-feature-server/Chart.yaml | 12 +++ infra/charts/feast-feature-server/README.md | 82 +++++++++++++++++++ .../templates/_helpers.tpl | 52 ++++++++++++ .../templates/deployment.yaml | 61 ++++++++++++++ .../templates/service.yaml | 15 ++++ infra/charts/feast-feature-server/values.yaml | 57 +++++++++++++ infra/charts/feast-python-server/README.md | 2 +- 9 files changed, 304 insertions(+), 7 deletions(-) create mode 100644 infra/charts/feast-feature-server/.helmignore create mode 100644 infra/charts/feast-feature-server/Chart.yaml create mode 100644 infra/charts/feast-feature-server/README.md create mode 100644 infra/charts/feast-feature-server/templates/_helpers.tpl create mode 100644 infra/charts/feast-feature-server/templates/deployment.yaml create mode 100644 infra/charts/feast-feature-server/templates/service.yaml create mode 100644 infra/charts/feast-feature-server/values.yaml diff --git a/docs/how-to-guides/running-feast-in-production.md 
b/docs/how-to-guides/running-feast-in-production.md index eba3507f7d..f03629ea4b 100644 --- a/docs/how-to-guides/running-feast-in-production.md +++ b/docs/how-to-guides/running-feast-in-production.md @@ -257,7 +257,7 @@ helm repo update ``` 3. Run Helm Install ``` -helm install feast-release feast-charts/feast-python-server \ +helm install feast-release feast-charts/feast-feature-server \ --set global.registry.path=s3://feast/registries/prod \ --set global.project= ``` @@ -267,11 +267,6 @@ The service must have read access to the registry file on cloud storage. It will keep a copy of the registry in their memory and periodically refresh it, so expect some delays in update propagation in exchange for better performance. In order for the Go feature server to be enabled, you should set `go_feature_serving: True` in the `feature_store.yaml`. -#### Load balancing - -The next step would be to install an L7 Load Balancer (eg, [Envoy](https://www.envoyproxy.io/)) in front of the Go feature server. -For seamless integration with Kubernetes (including services created by Feast Helm chart) we recommend using [Istio](https://istio.io/) as Envoy's orchestrator. - ## 5. Ingesting features from a stream source Recently Feast added functionality for [stream ingestion](../reference/data-sources/push.md). diff --git a/infra/charts/feast-feature-server/.helmignore b/infra/charts/feast-feature-server/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/infra/charts/feast-feature-server/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/infra/charts/feast-feature-server/Chart.yaml b/infra/charts/feast-feature-server/Chart.yaml new file mode 100644 index 0000000000..6c1afc9540 --- /dev/null +++ b/infra/charts/feast-feature-server/Chart.yaml @@ -0,0 +1,12 @@ +apiVersion: v2 +name: feast-feature-server +description: Feast Feature Server in Go or Python +type: application +version: 0.22.0 +keywords: + - machine learning + - big data + - mlops +home: https://feast.dev/ +sources: + - https://github.com/feast-dev/feast diff --git a/infra/charts/feast-feature-server/README.md b/infra/charts/feast-feature-server/README.md new file mode 100644 index 0000000000..a55451e788 --- /dev/null +++ b/infra/charts/feast-feature-server/README.md @@ -0,0 +1,82 @@ +# feast-feature-server + +![Version: 0.22.0](https://img.shields.io/badge/Version-0.22.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) + +Feast Feature Server in Go or Python + +**Homepage:** + +## Source Code + +* + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | | +| fullnameOverride | string | `""` | | +| image.pullPolicy | string | `"IfNotPresent"` | | +| image.repository | string | `""` | | +| image.tag | string | `""` | | +| imagePullSecrets | list | `[]` | | +| livenessProbe.initialDelaySeconds | int | `30` | | +| livenessProbe.periodSeconds | int | `30` | | +| nameOverride | string | `""` | | +| nodeSelector | object | `{}` | | +| podAnnotations | object | `{}` | | +| podSecurityContext | object | `{}` | | +| readinessProbe.initialDelaySeconds | int | `20` | | +| readinessProbe.periodSeconds | int | `10` | | +| replicaCount | int | `1` | | +| resources | object | `{}` | | +| 
securityContext | object | `{}` | | +| service.port | int | `80` | | +| service.type | string | `"ClusterIP"` | | +| tolerations | list | `[]` | | + +---------------------------------------------- +Autogenerated from chart metadata using [helm-docs v1.11.0](https://github.com/norwoodj/helm-docs/releases/v1.11.0) + + +Docker repository and tag are required. Helm install example: +``` +helm install feast-feature-server . --set image.repository=REPO --set image.tag=TAG +``` + +Deployment assumes that `feature_store.yaml` exists on docker image. Example docker image: +``` +FROM python:3.8 + +RUN apt update && \ + apt install -y jq + +RUN pip install pip --upgrade + +RUN pip install feast + +COPY feature_store.yaml /feature_store.yaml +``` + +Furthermore, if you wish to use the Go feature server, then you must install the Apache Arrow C++ libraries, and your `feature_store.yaml` should include `go_feature_server: True`. +For more details, see the [docs](https://docs.feast.dev/reference/feature-servers/go-feature-server). 
+The docker image might look like: +``` +FROM python:3.8 + +RUN apt update && \ + apt install -y jq + +RUN pip install pip --upgrade + +RUN pip install feast + +RUN apt update +RUN apt install -y -V ca-certificates lsb-release wget +RUN wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb +RUN apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb +RUN apt update +RUN apt -y install libarrow-dev + +COPY feature_store.yaml /feature_store.yaml +``` \ No newline at end of file diff --git a/infra/charts/feast-feature-server/templates/_helpers.tpl b/infra/charts/feast-feature-server/templates/_helpers.tpl new file mode 100644 index 0000000000..19c2febd13 --- /dev/null +++ b/infra/charts/feast-feature-server/templates/_helpers.tpl @@ -0,0 +1,52 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. +*/}} +{{- define "feast-feature-server.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "feast-feature-server.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. 
+*/}} +{{- define "feast-feature-server.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "feast-feature-server.labels" -}} +helm.sh/chart: {{ include "feast-feature-server.chart" . }} +{{ include "feast-feature-server.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "feast-feature-server.selectorLabels" -}} +app.kubernetes.io/name: {{ include "feast-feature-server.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/infra/charts/feast-feature-server/templates/deployment.yaml b/infra/charts/feast-feature-server/templates/deployment.yaml new file mode 100644 index 0000000000..69cf92f6c0 --- /dev/null +++ b/infra/charts/feast-feature-server/templates/deployment.yaml @@ -0,0 +1,61 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "feast-feature-server.fullname" . }} + labels: + {{- include "feast-feature-server.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "feast-feature-server.selectorLabels" . | nindent 6 }} + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "feast-feature-server.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . 
| nindent 8 }} + {{- end }} + securityContext: + {{- toYaml .Values.podSecurityContext | nindent 8 }} + containers: + - name: {{ .Chart.Name }} + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + command: ["feast", "serve", "-h", "0.0.0.0"] + ports: + - name: http + containerPort: 6566 + protocol: TCP + livenessProbe: + tcpSocket: + port: http + initialDelaySeconds: {{ .Values.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.livenessProbe.periodSeconds }} + readinessProbe: + tcpSocket: + port: http + initialDelaySeconds: {{ .Values.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.readinessProbe.periodSeconds }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/infra/charts/feast-feature-server/templates/service.yaml b/infra/charts/feast-feature-server/templates/service.yaml new file mode 100644 index 0000000000..d6914828e4 --- /dev/null +++ b/infra/charts/feast-feature-server/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "feast-feature-server.name" . }} + labels: + {{- include "feast-feature-server.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "feast-feature-server.selectorLabels" . 
| nindent 4 }} diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml new file mode 100644 index 0000000000..f62f95a757 --- /dev/null +++ b/infra/charts/feast-feature-server/values.yaml @@ -0,0 +1,57 @@ +# Default values for feast. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +replicaCount: 1 + +image: + repository: "" + pullPolicy: IfNotPresent + tag: "" + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +podAnnotations: {} + +podSecurityContext: {} + # fsGroup: 2000 + +securityContext: {} + # capabilities: + # drop: + # - ALL + # readOnlyRootFilesystem: true + # runAsNonRoot: true + # runAsUser: 1000 + +service: + type: ClusterIP + port: 80 + +resources: {} + # We usually recommend not to specify default resources and to leave this as a conscious + # choice for the user. This also increases chances charts run on environments with little + # resources, such as Minikube. If you do want to specify resources, uncomment the following + # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + # limits: + # cpu: 100m + # memory: 128Mi + # requests: + # cpu: 100m + # memory: 128Mi + +nodeSelector: {} + +tolerations: [] + +affinity: {} + +livenessProbe: + initialDelaySeconds: 30 + periodSeconds: 30 + +readinessProbe: + initialDelaySeconds: 20 + periodSeconds: 10 diff --git a/infra/charts/feast-python-server/README.md b/infra/charts/feast-python-server/README.md index 3f60cc6c54..17ce5be682 100644 --- a/infra/charts/feast-python-server/README.md +++ b/infra/charts/feast-python-server/README.md @@ -46,7 +46,7 @@ helm install feast-python-server . --set image.repository=REPO --set image.tag=T Deployment assumes that `feature_store.yaml` exists on docker image. 
Example docker image: ``` -FROM python:3.7 +FROM python:3.8 RUN apt update && \ apt install -y jq From e4507ac16540cb3a7e29c31121963a0fe8f79fe4 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Mon, 25 Jul 2022 18:07:42 -0700 Subject: [PATCH 59/73] fix: Remove hard-coded integration test setup for AWS & GCP (#2970) * Fix Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * see if fix works Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang --- .github/workflows/pr_integration_tests.yml | 2 - .github/workflows/unit_tests.yml | 3 +- CONTRIBUTING.md | 59 +++++++++++++++++-- README.md | 12 ++-- sdk/python/tests/conftest.py | 15 +++-- .../feature_repos/repo_configuration.py | 12 ++-- .../universal/data_sources/bigquery.py | 6 +- .../universal/data_sources/redshift.py | 24 +++++--- .../registration/test_feature_store.py | 10 +++- .../integration/registration/test_registry.py | 6 +- setup.py | 2 +- 11 files changed, 111 insertions(+), 40 deletions(-) diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index 8f64950a30..58bf45c687 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -175,8 +175,6 @@ jobs: if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak env: FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }} - FEAST_USAGE: "False" - IS_TEST: "True" SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index b3fadb2121..7bbe9ad6ac 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -70,13 +70,12 
@@ jobs: run: make install-python-ci-dependencies - name: Test Python env: - IS_TEST: "True" SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} - run: FEAST_USAGE=False pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests + run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 with: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a6148458a7..a8671d9986 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -139,7 +139,7 @@ There are two sets of tests you can run: #### Local integration tests For this approach of running tests, you'll need to have docker set up locally: [Get Docker](https://docs.docker.com/get-docker/) -It leverages a file based offline store to test against emulated versions of Datastore, DynamoDB, and Redis, using ephemeral containers. +It leverages a file based offline store to test against emulated versions of Datastore, DynamoDB, and Redis, using ephemeral containers. These tests create new temporary tables / datasets locally only, and they are cleaned up. when the containers are torn down. @@ -161,17 +161,48 @@ To test across clouds, on top of setting up Redis, you also need GCP / AWS / Sno gcloud auth login gcloud auth application-default login ``` -3. Export `GCLOUD_PROJECT=[your project]` to your .zshrc +- When you run `gcloud auth application-default login`, you should see some output of the form: + ``` + Credentials saved to file: [$HOME/.config/gcloud/application_default_credentials.json] + ``` +- You should run `export GOOGLE_APPLICATION_CREDENTIALS="$HOME/.config/gcloud/application_default_credentials.json”` to add the application credentials to your .zshrc or .bashrc. +3. 
Run `export GCLOUD_PROJECT=[your project]` to your .zshrc or .bashrc. +4. Running `gcloud config list` should give you something like this: +```sh +$ gcloud config list +[core] +account = [your email] +disable_usage_reporting = True +project = [your project] + +Your active configuration is: [default] +``` +5. Export gcp specific environment variables. Namely, +```sh +export GCS_REGION='[your gcs region e.g US]' +export GCS_STAGING_LOCATION='[your gcs staging location]' +``` **AWS** 1. TODO(adchia): flesh out setting up AWS login (or create helper script) -2. Modify `RedshiftDataSourceCreator` to use your credentials +2. To run the AWS Redshift and Dynamo integration tests you will have to export your own AWS credentials. Namely, + +```sh +export AWS_REGION='[your aws region]' +export AWS_CLUSTER_ID='[your aws cluster id]' +export AWS_USER='[your aws user]' +export AWS_DB='[your aws database]' +export AWS_STAGING_LOCATION='[your s3 staging location uri]' +export AWS_IAM_ROLE='[redshift and s3 access role]' +export AWS_LAMBDA_ROLE='[your aws lambda execution role]' +export AWS_REGISTRY_PATH='[your aws registry path]' +``` **Snowflake** -1. See https://signup.snowflake.com/ to setup a trial. +1. See https://signup.snowflake.com/ to setup a trial. 2. Then to run successfully, you'll need some environment variables setup: ```sh -export SNOWFLAKE_CI_DEPLOYMENT='[snowflake_deployment]' +export SNOWFLAKE_CI_DEPLOYMENT='[snowflake_deployment]' export SNOWFLAKE_CI_USER='[your user]' export SNOWFLAKE_CI_PASSWORD='[your pw]' export SNOWFLAKE_CI_ROLE='[your CI role e.g. SYSADMIN]' @@ -180,12 +211,28 @@ export SNOWFLAKE_CI_WAREHOUSE='[your warehouse]' Then run `make test-python-integration`. Note that for Snowflake / GCP / AWS, this will create new temporary tables / datasets. +#### Running specific provider tests or running your test against specific online or offline stores + +1. 
If you don't need to have your test run against all of the providers(`gcp`, `aws`, and `snowflake`) or don't need to run against all of the online stores, you can tag your test with specific providers or stores that you need(`@pytest.mark.universal_online_stores` or `@pytest.mark.universal_online_stores` with the `only` parameter). The `only` parameter selects specific offline providers and online stores that your test will test against. Example: + +```python +# Only parametrizes this test with the sqlite online store +@pytest.mark.universal_online_stores(only=["sqlite"]) +def test_feature_get_online_features_types_match(): +``` + +2. You can also filter tests to run by using pytest's cli filtering. Instead of using the make commands to test Feast, you can filter tests by name with the `-k` parameter. The parametrized integration tests are all uniquely identified by their provider and online store so the `-k` option can select only the tests that you need to run. For example, to run only Redshift related tests, you can use the following command: + +```sh +python -m pytest -n 8 --integration -k Redshift sdk/python/tests +``` + #### (Experimental) Run full integration tests against containerized services Test across clouds requires existing accounts on GCP / AWS / Snowflake, and may incur costs when using these services. For this approach of running tests, you'll need to have docker set up locally: [Get Docker](https://docs.docker.com/get-docker/) -It's possible to run some integration tests against emulated local versions of these services, using ephemeral containers. +It's possible to run some integration tests against emulated local versions of these services, using ephemeral containers. These tests create new temporary tables / datasets locally only, and they are cleaned up. when the containers are torn down. 
The services with containerized replacements currently implemented are: diff --git a/README.md b/README.md index efe194687f..4616686e46 100644 --- a/README.md +++ b/README.md @@ -135,11 +135,10 @@ pprint(feature_vector) ## 📦 Functionality and Roadmap -The list below contains the functionality that contributors are planning to develop for Feast +The list below contains the functionality that contributors are planning to develop for Feast. -* Items below that are in development (or planned for development) will be indicated in parentheses. * We welcome contribution to all items in the roadmap! -* Have questions about the roadmap? Go to the Slack channel to ask on #feast-development +* Have questions about the roadmap? Go to the Slack channel to ask on #feast-development. * **Data Sources** * [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake) @@ -185,9 +184,8 @@ The list below contains the functionality that contributors are planning to deve * [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) * **Feature Serving** * [x] Python Client - * [x] REST Feature Server (Python) (See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) - * [x] REST / gRPC Feature Server (Go) (Alpha release. See [docs](https://docs.feast.dev/reference/feature-servers/go-feature-retrieval) - * [x] gRPC Feature Server (Java) (Alpha release. 
See [#1497](https://github.com/feast-dev/feast/issues/1497)) + * [x] [Python feature server](https://docs.feast.dev/reference/feature-servers/python-feature-server) + * [x] [Go feature server](https://docs.feast.dev/reference/feature-servers/go-feature-server) * **Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** * [x] Data profiling and validation (Great Expectations) * **Feature Discovery and Governance** @@ -196,7 +194,7 @@ The list below contains the functionality that contributors are planning to deve * [x] Model-centric feature tracking (feature services) * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) - * [x] Feast Web UI (Alpha release. See [documentation](https://docs.feast.dev/reference/alpha-web-ui)) + * [x] Feast Web UI (Alpha release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) ## 🎓 Important Resources diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 7da791f64f..06b77f13b3 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -13,6 +13,7 @@ # limitations under the License. 
import logging import multiprocessing +import os import socket from contextlib import closing from datetime import datetime, timedelta @@ -24,13 +25,15 @@ import pytest from _pytest.nodes import Item -from feast import FeatureStore -from feast.wait import wait_retry_backoff -from tests.data.data_creator import create_basic_driver_dataset -from tests.integration.feature_repos.integration_test_repo_config import ( +os.environ["FEAST_USAGE"] = "False" +os.environ["IS_TEST"] = "True" +from feast import FeatureStore # noqa: E402 +from feast.wait import wait_retry_backoff # noqa: E402 +from tests.data.data_creator import create_basic_driver_dataset # noqa: E402 +from tests.integration.feature_repos.integration_test_repo_config import ( # noqa: E402 IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.repo_configuration import ( +from tests.integration.feature_repos.repo_configuration import ( # noqa: E402 AVAILABLE_OFFLINE_STORES, AVAILABLE_ONLINE_STORES, OFFLINE_STORE_TO_PROVIDER_CONFIG, @@ -39,7 +42,7 @@ construct_test_environment, construct_universal_test_data, ) -from tests.integration.feature_repos.universal.data_sources.file import ( +from tests.integration.feature_repos.universal.data_sources.file import ( # noqa: E402 FileDataSourceCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 75d6509f3c..bad2f52906 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -65,8 +65,6 @@ ) DYNAMO_CONFIG = {"type": "dynamodb", "region": "us-west-2"} -# Port 12345 will chosen as default for redis node configuration because Redis Cluster is started off of nodes -# 6379 -> 6384. This causes conflicts in cli integration tests so we manually keep them separate. 
REDIS_CONFIG = {"type": "redis", "connection_string": "localhost:6379,db=0"} REDIS_CLUSTER_CONFIG = { "type": "redis", @@ -390,7 +388,10 @@ def construct_test_environment( feature_server = AwsLambdaFeatureServerConfig( enabled=True, - execution_role_name="arn:aws:iam::402087665549:role/lambda_execution_role", + execution_role_name=os.getenv( + "AWS_LAMBDA_ROLE", + "arn:aws:iam::402087665549:role/lambda_execution_role", + ), ) else: @@ -402,9 +403,12 @@ def construct_test_environment( if ( test_repo_config.python_feature_server and test_repo_config.provider == "aws" ) or test_repo_config.registry_location == RegistryLocation.S3: + aws_registry_path = os.getenv( + "AWS_REGISTRY_PATH", "s3://feast-integration-tests/registries" + ) registry: Union[ str, RegistryConfig - ] = f"s3://feast-integration-tests/registries/{project}/registry.db" + ] = f"{aws_registry_path}/{project}/registry.db" else: registry = RegistryConfig( path=str(Path(repo_dir_name) / "registry.db"), diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 0f41176bd1..384037eef1 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -1,3 +1,4 @@ +import os import uuid from typing import Dict, List, Optional @@ -53,7 +54,10 @@ def teardown(self): def create_offline_store_config(self): return BigQueryOfflineStoreConfig( - location="US", gcs_staging_location="gs://feast-export/" + location=os.getenv("GCS_REGION", "US"), + gcs_staging_location=os.getenv( + "GCS_STAGING_LOCATION", "gs://feast-export/" + ), ) def create_data_source( diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py index 3b2794393f..c92a413616 100644 --- 
a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -1,3 +1,4 @@ +import os import uuid from typing import Dict, List, Optional @@ -24,16 +25,23 @@ class RedshiftDataSourceCreator(DataSourceCreator): def __init__(self, project_name: str, *args, **kwargs): super().__init__(project_name) - self.client = aws_utils.get_redshift_data_client("us-west-2") - self.s3 = aws_utils.get_s3_resource("us-west-2") + self.client = aws_utils.get_redshift_data_client( + os.getenv("AWS_REGION", "us-west-2") + ) + self.s3 = aws_utils.get_s3_resource(os.getenv("AWS_REGION", "us-west-2")) self.offline_store_config = RedshiftOfflineStoreConfig( - cluster_id="feast-integration-tests", - region="us-west-2", - user="admin", - database="feast", - s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion", - iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role", + cluster_id=os.getenv("AWS_CLUSTER_ID", "feast-integration-tests"), + region=os.getenv("AWS_REGION", "us-west-2"), + user=os.getenv("AWS_USER", "admin"), + database=os.getenv("AWS_DB", "feast"), + s3_staging_location=os.getenv( + "AWS_STAGING_LOCATION", + "s3://feast-integration-tests/redshift/tests/ingestion", + ), + iam_role=os.getenv( + "AWS_IAM_ROLE", "arn:aws:iam::402087665549:role/redshift_s3_access_role" + ), ) def create_data_source( diff --git a/sdk/python/tests/integration/registration/test_feature_store.py b/sdk/python/tests/integration/registration/test_feature_store.py index 6243e27fca..12e9658649 100644 --- a/sdk/python/tests/integration/registration/test_feature_store.py +++ b/sdk/python/tests/integration/registration/test_feature_store.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os import time from datetime import datetime, timedelta from tempfile import mkstemp @@ -75,12 +76,17 @@ def feature_store_with_gcs_registry(): @pytest.fixture def feature_store_with_s3_registry(): + aws_registry_path = os.getenv( + "AWS_REGISTRY_PATH", "s3://feast-integration-tests/registries" + ) return FeatureStore( config=RepoConfig( - registry=f"s3://feast-integration-tests/registries/{int(time.time() * 1000)}/registry.db", + registry=f"{aws_registry_path}/{int(time.time() * 1000)}/registry.db", project="default", provider="aws", - online_store=DynamoDBOnlineStoreConfig(region="us-west-2"), + online_store=DynamoDBOnlineStoreConfig( + region=os.getenv("AWS_REGION", "us-west-2") + ), offline_store=FileOfflineStoreConfig(), ) ) diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index e6309779f9..e192657074 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os import time from datetime import timedelta from tempfile import mkstemp @@ -63,8 +64,11 @@ def gcs_registry() -> Registry: @pytest.fixture def s3_registry() -> Registry: + aws_registry_path = os.getenv( + "AWS_REGISTRY_PATH", "s3://feast-integration-tests/registries" + ) registry_config = RegistryConfig( - path=f"s3://feast-integration-tests/registries/{int(time.time() * 1000)}/registry.db", + path=f"{aws_registry_path}/{int(time.time() * 1000)}/registry.db", cache_ttl_seconds=600, ) return Registry(registry_config, None) diff --git a/setup.py b/setup.py index c6d4aa30b7..e9d7941445 100644 --- a/setup.py +++ b/setup.py @@ -59,7 +59,7 @@ "mmh3", "numpy>=1.22,<3", "pandas>=1.4.3,<2", - "pandavro==1.5.*", + "pandavro==1.5.*", # For some reason pandavro higher than 1.5.* only support pandas less than 1.3. "protobuf>3.20,<4", "proto-plus>=1.20.0,<2", "pyarrow>=4,<9", From 3ce51391e0b2ebdec68c81d93b54f5d06bb427a6 Mon Sep 17 00:00:00 2001 From: kfiring Date: Wed, 27 Jul 2022 03:05:15 +0800 Subject: [PATCH 60/73] fix: Spark source support table with pattern "db.table" (#2606) * 1. 
fix: spark source support table with pattern "db.table" (#2605 https://github.com/feast-dev/feast/issues/2605) Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang Co-authored-by: leonpeng Co-authored-by: Kevin Zhang --- .../offline_stores/contrib/spark_offline_store/spark_source.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py index 0ddeaad354..454e7ee87e 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py @@ -177,7 +177,8 @@ def get_table_query_string(self) -> str: """Returns a string that can directly be used to reference this table in SQL""" if self.table: # Backticks make sure that spark sql knows this a table reference. - return f"`{self.table}`" + table = ".".join([f"`{x}`" for x in self.table.split(".")]) + return table if self.query: return f"({self.query})" From 26f6b69b0e2c8a4ea37b43e3d1eaa4cdb8c085a9 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Tue, 26 Jul 2022 15:47:15 -0700 Subject: [PATCH 61/73] fix: Fix file offline store logic for feature views without ttl (#2971) * Add new test for historical retrieval with feature views with no ttl Signed-off-by: Felix Wang * Fix no ttl logic Signed-off-by: Felix Wang --- sdk/python/feast/infra/offline_stores/file.py | 8 ++ .../test_universal_historical_retrieval.py | 103 ++++++++++++++++-- 2 files changed, 104 insertions(+), 7 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index 1af98c1437..829bd36c3d 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -635,6 +635,14 @@ def _filter_ttl( ) ] + df_to_join = df_to_join.persist() + else: + df_to_join = 
df_to_join[ + # do not drop entity rows if one of the sources returns NaNs + df_to_join[timestamp_field].isna() + | (df_to_join[timestamp_field] <= df_to_join[entity_df_event_timestamp_col]) + ] + df_to_join = df_to_join.persist() return df_to_join diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index 32e6e52d18..87bf59fe9f 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -115,13 +115,17 @@ def get_expected_training_df( entity_df.to_dict("records"), event_timestamp ) + # Set sufficiently large ttl that it effectively functions as infinite for the calculations below. + default_ttl = timedelta(weeks=52) + # Manually do point-in-time join of driver, customer, and order records against # the entity df for entity_row in entity_rows: customer_record = find_asof_record( customer_records, ts_key=customer_fv.batch_source.timestamp_field, - ts_start=entity_row[event_timestamp] - customer_fv.ttl, + ts_start=entity_row[event_timestamp] + - get_feature_view_ttl(customer_fv, default_ttl), ts_end=entity_row[event_timestamp], filter_keys=["customer_id"], filter_values=[entity_row["customer_id"]], @@ -129,7 +133,8 @@ def get_expected_training_df( driver_record = find_asof_record( driver_records, ts_key=driver_fv.batch_source.timestamp_field, - ts_start=entity_row[event_timestamp] - driver_fv.ttl, + ts_start=entity_row[event_timestamp] + - get_feature_view_ttl(driver_fv, default_ttl), ts_end=entity_row[event_timestamp], filter_keys=["driver_id"], filter_values=[entity_row["driver_id"]], @@ -137,7 +142,8 @@ def get_expected_training_df( order_record = find_asof_record( order_records, ts_key=customer_fv.batch_source.timestamp_field, - ts_start=entity_row[event_timestamp] - order_fv.ttl, + 
ts_start=entity_row[event_timestamp] + - get_feature_view_ttl(order_fv, default_ttl), ts_end=entity_row[event_timestamp], filter_keys=["customer_id", "driver_id"], filter_values=[entity_row["customer_id"], entity_row["driver_id"]], @@ -145,7 +151,8 @@ def get_expected_training_df( origin_record = find_asof_record( location_records, ts_key=location_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - location_fv.ttl, + ts_start=order_record[event_timestamp] + - get_feature_view_ttl(location_fv, default_ttl), ts_end=order_record[event_timestamp], filter_keys=["location_id"], filter_values=[order_record["origin_id"]], @@ -153,7 +160,8 @@ def get_expected_training_df( destination_record = find_asof_record( location_records, ts_key=location_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - location_fv.ttl, + ts_start=order_record[event_timestamp] + - get_feature_view_ttl(location_fv, default_ttl), ts_end=order_record[event_timestamp], filter_keys=["location_id"], filter_values=[order_record["destination_id"]], @@ -161,14 +169,16 @@ def get_expected_training_df( global_record = find_asof_record( global_records, ts_key=global_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - global_fv.ttl, + ts_start=order_record[event_timestamp] + - get_feature_view_ttl(global_fv, default_ttl), ts_end=order_record[event_timestamp], ) field_mapping_record = find_asof_record( field_mapping_records, ts_key=field_mapping_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - field_mapping_fv.ttl, + ts_start=order_record[event_timestamp] + - get_feature_view_ttl(field_mapping_fv, default_ttl), ts_end=order_record[event_timestamp], ) @@ -666,6 +676,78 @@ def test_historical_features_persisting( ) +@pytest.mark.integration +@pytest.mark.universal_offline_stores +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +def test_historical_features_with_no_ttl( + 
environment, universal_data_sources, full_feature_names +): + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + # Remove ttls. + feature_views.customer.ttl = timedelta(seconds=0) + feature_views.order.ttl = timedelta(seconds=0) + feature_views.global_fv.ttl = timedelta(seconds=0) + feature_views.field_mapping.ttl = timedelta(seconds=0) + + store.apply([driver(), customer(), location(), *feature_views.values()]) + + entity_df = datasets.entity_df.drop( + columns=["order_id", "origin_id", "destination_id"] + ) + + job = store.get_historical_features( + entity_df=entity_df, + features=[ + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "order:order_is_success", + "global_stats:num_rides", + "global_stats:avg_ride_length", + "field_mapping:feature_name", + ], + full_feature_names=full_feature_names, + ) + + event_timestamp = DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL + expected_df = get_expected_training_df( + datasets.customer_df, + feature_views.customer, + datasets.driver_df, + feature_views.driver, + datasets.orders_df, + feature_views.order, + datasets.location_df, + feature_views.location, + datasets.global_df, + feature_views.global_fv, + datasets.field_mapping_df, + feature_views.field_mapping, + entity_df, + event_timestamp, + full_feature_names, + ).drop( + columns=[ + response_feature_name("conv_rate_plus_100", full_feature_names), + response_feature_name("conv_rate_plus_100_rounded", full_feature_names), + response_feature_name("avg_daily_trips", full_feature_names), + response_feature_name("conv_rate", full_feature_names), + "origin__temperature", + "destination__temperature", + ] + ) + + assert_frame_equal( + expected_df, + job.to_df(), + keys=[event_timestamp, "driver_id", "customer_id"], + ) + + @pytest.mark.integration @pytest.mark.universal_offline_stores def 
test_historical_features_from_bigquery_sources_containing_backfills(environment): @@ -781,6 +863,13 @@ def response_feature_name(feature: str, full_feature_names: bool) -> str: return feature +def get_feature_view_ttl( + feature_view: FeatureView, default_ttl: timedelta +) -> timedelta: + """Returns the ttl of a feature view if it is non-zero. Otherwise returns the specified default.""" + return feature_view.ttl if feature_view.ttl else default_ttl + + def assert_feature_service_correctness( store, feature_service, full_feature_names, entity_df, expected_df, event_timestamp ): From 5edf4b0332a298a0e172dd58e0a627efe5705eec Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Wed, 27 Jul 2022 14:59:05 -0700 Subject: [PATCH 62/73] fix: Switch mysql log string to use regex (#2976) Switch log string to use regex Signed-off-by: Felix Wang --- sdk/python/tests/integration/registration/test_sql_registry.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/tests/integration/registration/test_sql_registry.py b/sdk/python/tests/integration/registration/test_sql_registry.py index 23a19f664a..286b1abd21 100644 --- a/sdk/python/tests/integration/registration/test_sql_registry.py +++ b/sdk/python/tests/integration/registration/test_sql_registry.py @@ -87,7 +87,8 @@ def mysql_registry(): container.start() - log_string_to_wait_for = "/usr/sbin/mysqld: ready for connections. Version: '8.0.29' socket: '/var/run/mysqld/mysqld.sock' port: 3306" + # The log string uses '8.0.*' since the version might be changed as new Docker images are pushed. + log_string_to_wait_for = "/usr/sbin/mysqld: ready for connections. 
Version: '8.0.*' socket: '/var/run/mysqld/mysqld.sock' port: 3306" waited = wait_for_logs( container=container, predicate=log_string_to_wait_for, From f2696e0149b21304145888a1c4292f79e9e76596 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Fri, 29 Jul 2022 14:18:33 -0400 Subject: [PATCH 63/73] ci: Fix pip install issues from grpcio version mismatches (#2984) * ci: Fix pip install issues from grpcio version mismatches Signed-off-by: Danny Chiao * fix verify-wheel Signed-off-by: Danny Chiao * fix verify-wheel Signed-off-by: Danny Chiao --- .github/workflows/build_wheels.yml | 2 +- Makefile | 2 +- pyproject.toml | 2 +- .../requirements/py3.10-ci-requirements.txt | 56 +++++++++-------- .../requirements/py3.10-requirements.txt | 22 ++++--- .../requirements/py3.8-ci-requirements.txt | 58 ++++++++--------- .../requirements/py3.8-requirements.txt | 24 +++---- .../requirements/py3.9-ci-requirements.txt | 62 ++++++++++--------- .../requirements/py3.9-requirements.txt | 22 ++++--- setup.py | 8 +-- 10 files changed, 135 insertions(+), 123 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index c47a8ec5c3..096bdb5b81 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -204,7 +204,7 @@ jobs: env: COMPILE_GO: "True" run: | - pip install 'grpcio-tools==1.47.0' 'pybindgen==0.22.0' + pip install 'grpcio-tools==1.48.0' 'pybindgen==0.22.0' go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0 go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1.0 pip install dist/*tar.gz diff --git a/Makefile b/Makefile index 915ac907f7..288da43fcd 100644 --- a/Makefile +++ b/Makefile @@ -196,7 +196,7 @@ install-go-ci-dependencies: python -m pip install pybindgen==0.22.0 protobuf==3.20.1 install-protoc-dependencies: - pip install grpcio-tools==1.47.0 mypy-protobuf==3.1.0 + pip install grpcio-tools==1.48.0 mypy-protobuf==3.1.0 compile-protos-go: install-go-proto-dependencies 
install-protoc-dependencies python setup.py build_go_protos diff --git a/pyproject.toml b/pyproject.toml index 1fef4c27c8..c89f1d9cc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=60", "wheel", "setuptools_scm>=6.2", "grpcio", "grpcio-tools==1.47.0", "mypy-protobuf==3.1", "sphinx!=4.0.0"] +requires = ["setuptools>=60", "wheel", "setuptools_scm>=6.2", "grpcio", "grpcio-tools>=1.47.0", "mypy-protobuf==3.1", "sphinx!=4.0.0"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 03bc8b9a28..591787a27b 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -48,7 +48,7 @@ async-timeout==4.0.2 # via # aiohttp # redis -attrs==21.4.0 +attrs==22.1.0 # via # aiohttp # bowler @@ -170,11 +170,11 @@ entrypoints==0.4 # via altair execnet==1.9.0 # via pytest-xdist -executing==0.8.3 +executing==0.9.1 # via stack-data fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.2 +fastavro==1.5.3 # via # feast (setup.py) # pandavro @@ -211,7 +211,7 @@ google-api-core[grpc]==2.8.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.53.0 +google-api-python-client==2.55.0 # via firebase-admin google-auth==2.9.1 # via @@ -226,7 +226,7 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==0.5.2 # via gcsfs -google-cloud-bigquery[pandas]==3.2.0 +google-cloud-bigquery[pandas]==3.3.0 # via feast (setup.py) google-cloud-bigquery-storage==2.14.1 # via @@ -261,7 +261,9 @@ googleapis-common-protos==1.56.4 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -grpcio==1.47.0 +greenlet==1.1.2 + # via sqlalchemy +grpcio==1.48.0 # via # feast (setup.py) # google-api-core @@ -270,13 +272,13 @@ grpcio==1.47.0 # grpcio-status # grpcio-testing # 
grpcio-tools -grpcio-reflection==1.47.0 +grpcio-reflection==1.48.0 # via feast (setup.py) -grpcio-status==1.47.0 +grpcio-status==1.48.0 # via google-api-core -grpcio-testing==1.47.0 +grpcio-testing==1.48.0 # via feast (setup.py) -grpcio-tools==1.47.0 +grpcio-tools==1.48.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -290,7 +292,7 @@ httplib2==0.20.4 # google-auth-httplib2 httptools==0.4.0 # via uvicorn -identify==2.5.1 +identify==2.5.2 # via pre-commit idna==3.3 # via @@ -327,7 +329,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.7.2 +jsonschema==4.8.0 # via # altair # feast (setup.py) @@ -341,6 +343,7 @@ markupsafe==2.1.1 # via # jinja2 # moto + # werkzeug matplotlib-inline==0.1.3 # via ipython mccabe==0.6.1 @@ -375,7 +378,7 @@ multidict==6.0.2 # via # aiohttp # yarl -mypy==0.961 +mypy==0.971 # via # feast (setup.py) # sqlalchemy @@ -625,7 +628,7 @@ requests-oauthlib==1.3.1 # msrest responses==0.21.0 # via moto -rsa==4.8 +rsa==4.9 # via google-auth ruamel-yaml==0.17.17 # via great-expectations @@ -647,7 +650,6 @@ six==1.16.0 # msrestazure # pandavro # python-dateutil - # virtualenv sniffio==1.2.0 # via anyio snowballstemmer==2.2.0 @@ -719,7 +721,7 @@ traitlets==5.3.0 # jupyter-core # matplotlib-inline # nbformat -trino==0.314.0 +trino==0.315.0 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) @@ -727,21 +729,21 @@ types-protobuf==3.19.22 # via # feast (setup.py) # mypy-protobuf -types-python-dateutil==2.8.18 +types-python-dateutil==2.8.19 # via feast (setup.py) -types-pytz==2022.1.1 +types-pytz==2022.1.2 # via feast (setup.py) -types-pyyaml==6.0.10 +types-pyyaml==6.0.11 # via feast (setup.py) -types-redis==4.3.4 +types-redis==4.3.11 # via feast (setup.py) -types-requests==2.28.2 +types-requests==2.28.5 # via feast (setup.py) -types-setuptools==63.2.0 +types-setuptools==63.2.2 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.16 +types-urllib3==1.26.17 # via 
types-requests typing-extensions==4.3.0 # via @@ -756,7 +758,7 @@ tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.10 +urllib3==1.26.11 # via # botocore # feast (setup.py) @@ -768,11 +770,11 @@ uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.15.1 +virtualenv==20.16.2 # via pre-commit volatile==2.1.0 # via bowler -watchfiles==0.15.0 +watchfiles==0.16.1 # via uvicorn wcwidth==0.2.5 # via prompt-toolkit @@ -780,7 +782,7 @@ websocket-client==1.3.3 # via docker websockets==10.3 # via uvicorn -werkzeug==2.1.2 +werkzeug==2.2.1 # via moto wheel==0.37.1 # via pip-tools diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 115a627341..f86636e227 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -12,7 +12,7 @@ anyio==3.6.1 # watchfiles appdirs==1.4.4 # via fissix -attrs==21.4.0 +attrs==22.1.0 # via # bowler # jsonschema @@ -40,13 +40,13 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.2 +fastavro==1.5.3 # via # feast (setup.py) # pandavro fissix==21.11.13 # via bowler -fsspec==2022.5.0 +fsspec==2022.7.1 # via dask google-api-core==2.8.2 # via feast (setup.py) @@ -57,11 +57,13 @@ googleapis-common-protos==1.56.4 # feast (setup.py) # google-api-core # tensorflow-metadata -grpcio==1.47.0 +greenlet==1.1.2 + # via sqlalchemy +grpcio==1.48.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.47.0 +grpcio-reflection==1.48.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -73,7 +75,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.7.2 +jsonschema==4.8.0 # via feast (setup.py) locket==1.0.0 # via partd @@ -83,7 +85,7 @@ mmh3==3.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==0.961 +mypy==0.971 # via sqlalchemy mypy-extensions==0.4.3 # via mypy @@ 
-144,7 +146,7 @@ pyyaml==6.0 # uvicorn requests==2.28.1 # via google-api-core -rsa==4.8 +rsa==4.9 # via google-auth six==1.16.0 # via @@ -183,7 +185,7 @@ typing-extensions==4.3.0 # mypy # pydantic # sqlalchemy2-stubs -urllib3==1.26.10 +urllib3==1.26.11 # via requests uvicorn[standard]==0.18.2 # via feast (setup.py) @@ -191,7 +193,7 @@ uvloop==0.16.0 # via uvicorn volatile==2.1.0 # via bowler -watchfiles==0.15.0 +watchfiles==0.16.1 # via uvicorn websockets==10.3 # via uvicorn diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index d25c433bac..a55759ca7d 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -48,7 +48,7 @@ async-timeout==4.0.2 # via # aiohttp # redis -attrs==21.4.0 +attrs==22.1.0 # via # aiohttp # bowler @@ -174,11 +174,11 @@ entrypoints==0.4 # via altair execnet==1.9.0 # via pytest-xdist -executing==0.8.3 +executing==0.9.1 # via stack-data fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.2 +fastavro==1.5.3 # via # feast (setup.py) # pandavro @@ -215,7 +215,7 @@ google-api-core[grpc]==2.8.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.53.0 +google-api-python-client==2.55.0 # via firebase-admin google-auth==2.9.1 # via @@ -230,7 +230,7 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==0.5.2 # via gcsfs -google-cloud-bigquery[pandas]==3.2.0 +google-cloud-bigquery[pandas]==3.3.0 # via feast (setup.py) google-cloud-bigquery-storage==2.14.1 # via @@ -265,7 +265,9 @@ googleapis-common-protos==1.56.4 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -grpcio==1.47.0 +greenlet==1.1.2 + # via sqlalchemy +grpcio==1.48.0 # via # feast (setup.py) # google-api-core @@ -274,13 +276,13 @@ grpcio==1.47.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.47.0 +grpcio-reflection==1.48.0 # via feast 
(setup.py) -grpcio-status==1.47.0 +grpcio-status==1.48.0 # via google-api-core -grpcio-testing==1.47.0 +grpcio-testing==1.48.0 # via feast (setup.py) -grpcio-tools==1.47.0 +grpcio-tools==1.48.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -294,7 +296,7 @@ httplib2==0.20.4 # google-auth-httplib2 httptools==0.4.0 # via uvicorn -identify==2.5.1 +identify==2.5.2 # via pre-commit idna==3.3 # via @@ -306,7 +308,7 @@ imagesize==1.4.1 # via sphinx importlib-metadata==4.12.0 # via great-expectations -importlib-resources==5.8.0 +importlib-resources==5.9.0 # via jsonschema iniconfig==1.1.1 # via pytest @@ -333,7 +335,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.7.2 +jsonschema==4.8.0 # via # altair # feast (setup.py) @@ -347,6 +349,7 @@ markupsafe==2.1.1 # via # jinja2 # moto + # werkzeug matplotlib-inline==0.1.3 # via ipython mccabe==0.6.1 @@ -381,7 +384,7 @@ multidict==6.0.2 # via # aiohttp # yarl -mypy==0.961 +mypy==0.971 # via # feast (setup.py) # sqlalchemy @@ -631,7 +634,7 @@ requests-oauthlib==1.3.1 # msrest responses==0.21.0 # via moto -rsa==4.8 +rsa==4.9 # via google-auth ruamel-yaml==0.17.17 # via great-expectations @@ -655,7 +658,6 @@ six==1.16.0 # msrestazure # pandavro # python-dateutil - # virtualenv sniffio==1.2.0 # via anyio snowballstemmer==2.2.0 @@ -727,7 +729,7 @@ traitlets==5.3.0 # jupyter-core # matplotlib-inline # nbformat -trino==0.314.0 +trino==0.315.0 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) @@ -735,21 +737,21 @@ types-protobuf==3.19.22 # via # feast (setup.py) # mypy-protobuf -types-python-dateutil==2.8.18 +types-python-dateutil==2.8.19 # via feast (setup.py) -types-pytz==2022.1.1 +types-pytz==2022.1.2 # via feast (setup.py) -types-pyyaml==6.0.10 +types-pyyaml==6.0.11 # via feast (setup.py) -types-redis==4.3.4 +types-redis==4.3.11 # via feast (setup.py) -types-requests==2.28.2 +types-requests==2.28.5 # via feast (setup.py) -types-setuptools==63.2.0 
+types-setuptools==63.2.2 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.16 +types-urllib3==1.26.17 # via types-requests typing-extensions==4.3.0 # via @@ -767,7 +769,7 @@ tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.10 +urllib3==1.26.11 # via # botocore # feast (setup.py) @@ -779,11 +781,11 @@ uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.15.1 +virtualenv==20.16.2 # via pre-commit volatile==2.1.0 # via bowler -watchfiles==0.15.0 +watchfiles==0.16.1 # via uvicorn wcwidth==0.2.5 # via prompt-toolkit @@ -791,7 +793,7 @@ websocket-client==1.3.3 # via docker websockets==10.3 # via uvicorn -werkzeug==2.1.2 +werkzeug==2.2.1 # via moto wheel==0.37.1 # via pip-tools diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 3de6ae7e9e..e75d23fe95 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -12,7 +12,7 @@ anyio==3.6.1 # watchfiles appdirs==1.4.4 # via fissix -attrs==21.4.0 +attrs==22.1.0 # via # bowler # jsonschema @@ -40,13 +40,13 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.2 +fastavro==1.5.3 # via # feast (setup.py) # pandavro fissix==21.11.13 # via bowler -fsspec==2022.5.0 +fsspec==2022.7.1 # via dask google-api-core==2.8.2 # via feast (setup.py) @@ -57,11 +57,13 @@ googleapis-common-protos==1.56.4 # feast (setup.py) # google-api-core # tensorflow-metadata -grpcio==1.47.0 +greenlet==1.1.2 + # via sqlalchemy +grpcio==1.48.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.47.0 +grpcio-reflection==1.48.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -71,11 +73,11 @@ idna==3.3 # via # anyio # requests -importlib-resources==5.8.0 +importlib-resources==5.9.0 # via jsonschema jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.7.2 
+jsonschema==4.8.0 # via feast (setup.py) locket==1.0.0 # via partd @@ -85,7 +87,7 @@ mmh3==3.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==0.961 +mypy==0.971 # via sqlalchemy mypy-extensions==0.4.3 # via mypy @@ -146,7 +148,7 @@ pyyaml==6.0 # uvicorn requests==2.28.1 # via google-api-core -rsa==4.8 +rsa==4.9 # via google-auth six==1.16.0 # via @@ -186,7 +188,7 @@ typing-extensions==4.3.0 # pydantic # sqlalchemy2-stubs # starlette -urllib3==1.26.10 +urllib3==1.26.11 # via requests uvicorn[standard]==0.18.2 # via feast (setup.py) @@ -194,7 +196,7 @@ uvloop==0.16.0 # via uvicorn volatile==2.1.0 # via bowler -watchfiles==0.15.0 +watchfiles==0.16.1 # via uvicorn websockets==10.3 # via uvicorn diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 2706348d41..2b43bde544 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -48,7 +48,7 @@ async-timeout==4.0.2 # via # aiohttp # redis -attrs==21.4.0 +attrs==22.1.0 # via # aiohttp # bowler @@ -170,11 +170,11 @@ entrypoints==0.4 # via altair execnet==1.9.0 # via pytest-xdist -executing==0.8.3 +executing==0.9.1 # via stack-data fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.2 +fastavro==1.5.3 # via # feast (setup.py) # pandavro @@ -211,7 +211,7 @@ google-api-core[grpc]==2.8.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.53.0 +google-api-python-client==2.55.0 # via firebase-admin google-auth==2.9.1 # via @@ -226,7 +226,7 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==0.5.2 # via gcsfs -google-cloud-bigquery[pandas]==3.2.0 +google-cloud-bigquery[pandas]==3.3.0 # via feast (setup.py) google-cloud-bigquery-storage==2.14.1 # via @@ -261,7 +261,9 @@ googleapis-common-protos==1.56.4 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -grpcio==1.47.0 
+greenlet==1.1.2 + # via sqlalchemy +grpcio==1.48.0 # via # feast (setup.py) # google-api-core @@ -270,13 +272,13 @@ grpcio==1.47.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.47.0 +grpcio-reflection==1.48.0 # via feast (setup.py) -grpcio-status==1.47.0 +grpcio-status==1.48.0 # via google-api-core -grpcio-testing==1.47.0 +grpcio-testing==1.48.0 # via feast (setup.py) -grpcio-tools==1.47.0 +grpcio-tools==1.48.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -290,7 +292,7 @@ httplib2==0.20.4 # google-auth-httplib2 httptools==0.4.0 # via uvicorn -identify==2.5.1 +identify==2.5.2 # via pre-commit idna==3.3 # via @@ -327,7 +329,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.7.2 +jsonschema==4.8.0 # via # altair # feast (setup.py) @@ -341,6 +343,7 @@ markupsafe==2.1.1 # via # jinja2 # moto + # werkzeug matplotlib-inline==0.1.3 # via ipython mccabe==0.6.1 @@ -375,7 +378,7 @@ multidict==6.0.2 # via # aiohttp # yarl -mypy==0.961 +mypy==0.971 # via # feast (setup.py) # sqlalchemy @@ -625,12 +628,12 @@ requests-oauthlib==1.3.1 # msrest responses==0.21.0 # via moto -rsa==4.8 +rsa==4.9 # via google-auth -ruamel-yaml==0.17.17 +ruamel.yaml==0.17.17 # via great-expectations -ruamel-yaml-clib==0.2.6 - # via ruamel-yaml +ruamel.yaml.clib==0.2.6 + # via ruamel.yaml s3fs==2022.1.0 # via feast (setup.py) s3transfer==0.5.2 @@ -649,7 +652,6 @@ six==1.16.0 # msrestazure # pandavro # python-dateutil - # virtualenv sniffio==1.2.0 # via anyio snowballstemmer==2.2.0 @@ -721,7 +723,7 @@ traitlets==5.3.0 # jupyter-core # matplotlib-inline # nbformat -trino==0.314.0 +trino==0.315.0 # via feast (setup.py) typeguard==2.13.3 # via feast (setup.py) @@ -729,21 +731,21 @@ types-protobuf==3.19.22 # via # feast (setup.py) # mypy-protobuf -types-python-dateutil==2.8.18 +types-python-dateutil==2.8.19 # via feast (setup.py) -types-pytz==2022.1.1 +types-pytz==2022.1.2 # via feast (setup.py) -types-pyyaml==6.0.10 
+types-pyyaml==6.0.11 # via feast (setup.py) -types-redis==4.3.4 +types-redis==4.3.11 # via feast (setup.py) -types-requests==2.28.2 +types-requests==2.28.5 # via feast (setup.py) -types-setuptools==63.2.0 +types-setuptools==63.2.2 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.16 +types-urllib3==1.26.17 # via types-requests typing-extensions==4.3.0 # via @@ -761,7 +763,7 @@ tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.10 +urllib3==1.26.11 # via # botocore # feast (setup.py) @@ -773,11 +775,11 @@ uvicorn[standard]==0.18.2 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.15.1 +virtualenv==20.16.2 # via pre-commit volatile==2.1.0 # via bowler -watchfiles==0.15.0 +watchfiles==0.16.1 # via uvicorn wcwidth==0.2.5 # via prompt-toolkit @@ -785,7 +787,7 @@ websocket-client==1.3.3 # via docker websockets==10.3 # via uvicorn -werkzeug==2.1.2 +werkzeug==2.2.1 # via moto wheel==0.37.1 # via pip-tools diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index d015b3aa1c..593e080725 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -12,7 +12,7 @@ anyio==3.6.1 # watchfiles appdirs==1.4.4 # via fissix -attrs==21.4.0 +attrs==22.1.0 # via # bowler # jsonschema @@ -40,13 +40,13 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.2 +fastavro==1.5.3 # via # feast (setup.py) # pandavro fissix==21.11.13 # via bowler -fsspec==2022.5.0 +fsspec==2022.7.1 # via dask google-api-core==2.8.2 # via feast (setup.py) @@ -57,11 +57,13 @@ googleapis-common-protos==1.56.4 # feast (setup.py) # google-api-core # tensorflow-metadata -grpcio==1.47.0 +greenlet==1.1.2 + # via sqlalchemy +grpcio==1.48.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.47.0 +grpcio-reflection==1.48.0 # via feast (setup.py) 
h11==0.13.0 # via uvicorn @@ -73,7 +75,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.7.2 +jsonschema==4.8.0 # via feast (setup.py) locket==1.0.0 # via partd @@ -83,7 +85,7 @@ mmh3==3.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==0.961 +mypy==0.971 # via sqlalchemy mypy-extensions==0.4.3 # via mypy @@ -144,7 +146,7 @@ pyyaml==6.0 # uvicorn requests==2.28.1 # via google-api-core -rsa==4.8 +rsa==4.9 # via google-auth six==1.16.0 # via @@ -184,7 +186,7 @@ typing-extensions==4.3.0 # pydantic # sqlalchemy2-stubs # starlette -urllib3==1.26.10 +urllib3==1.26.11 # via requests uvicorn[standard]==0.18.2 # via feast (setup.py) @@ -192,7 +194,7 @@ uvloop==0.16.0 # via uvicorn volatile==2.1.0 # via bowler -watchfiles==0.15.0 +watchfiles==0.16.1 # via uvicorn websockets==10.3 # via uvicorn diff --git a/setup.py b/setup.py index e9d7941445..f03aeefcf6 100644 --- a/setup.py +++ b/setup.py @@ -130,8 +130,8 @@ "flake8", "black>=22.6.0,<23", "isort>=5,<6", - "grpcio-tools==1.47.0", - "grpcio-testing==1.47.0", + "grpcio-tools>=1.47.0", + "grpcio-testing>=1.47.0", "minio==7.1.0", "mock==2.0.0", "moto", @@ -526,8 +526,8 @@ def copy_extensions_to_source(self): use_scm_version=use_scm_version, setup_requires=[ "setuptools_scm", - "grpcio==1.47.0", - "grpcio-tools==1.47.0", + "grpcio>=1.47.0", + "grpcio-tools>=1.47.0", "mypy-protobuf==3.1", "pybindgen==0.22.0", "sphinx!=4.0.0", From 2680f7b031717b64e6ea3addf150369dccebdbc1 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Fri, 29 Jul 2022 13:30:01 -0700 Subject: [PATCH 64/73] fix: Refactor testing and sort out unit and integration tests (#2975) * Refactor go feature server Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Fix e2e tests Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Verify tests Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Address review Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Address review 
Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * address review Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Refactor Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Fx lit Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang * update fix Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * Revert Signed-off-by: Kevin Zhang * Fix Signed-off-by: Kevin Zhang * fix Signed-off-by: Kevin Zhang * Fix lint Signed-off-by: Kevin Zhang --- sdk/python/tests/conftest.py | 20 +- .../integration/e2e/test_go_feature_server.py | 215 +++---- .../e2e/test_python_feature_server.py | 10 +- .../integration/e2e/test_universal_e2e.py | 203 +++--- .../tests/integration/e2e/test_usage_e2e.py | 2 + .../tests/integration/e2e/test_validation.py | 139 ++--- .../materialization/test_lambda.py | 148 +---- .../offline_store/test_feature_logging.py | 2 +- ...=> test_push_features_to_offline_store.py} | 0 .../test_universal_historical_retrieval.py | 444 +------------- .../online_store/test_online_retrieval.py | 2 +- ... 
=> test_push_features_to_online_store.py} | 0 .../online_store/test_universal_online.py | 2 +- .../integration/registration/test_cli.py | 365 ----------- .../test_feature_service_apply.py | 29 - .../registration/test_feature_store.py | 321 ++-------- .../registration/test_inference.py | 397 +----------- .../integration/registration/test_registry.py | 576 +----------------- .../test_stream_feature_view_apply.py | 153 ----- .../registration/test_universal_cli.py | 166 +++++ .../registration/test_universal_types.py | 182 +++--- .../scaffolding/test_partial_apply.py | 48 -- sdk/python/tests/unit/cli/test_cli.py | 140 +++++ .../cli}/test_cli_apply_duplicates.py | 2 +- .../cli}/test_cli_chdir.py | 2 +- .../tests/unit/diff/test_registry_diff.py | 2 +- .../test_dynamodb_online_store.py | 42 +- .../infra}/scaffolding/test_repo_config.py | 0 .../scaffolding/test_repo_operations.py | 0 .../unit/infra/test_inference_unit_tests.py | 382 ++++++++++++ .../tests/unit/infra/test_local_registry.py | 535 ++++++++++++++++ .../local_feast_tests}/test_e2e_local.py | 165 ++--- .../test_feature_service_read.py | 4 +- .../local_feast_tests}/test_init.py | 2 +- .../test_local_feature_store.py | 266 ++++++++ .../test_stream_feature_view_apply.py | 189 ++++++ sdk/python/tests/unit/test_feature_service.py | 15 +- sdk/python/tests/unit/test_proto_json.py | 10 +- ...write_test.py => basic_read_write_test.py} | 0 .../{cli_utils.py => cli_repo_creator.py} | 0 ...e_utils.py => data_source_test_creator.py} | 0 ...store_utils.py => dynamo_table_creator.py} | 8 +- sdk/python/tests/utils/e2e_test_validation.py | 277 +++++++++ sdk/python/tests/utils/feature_records.py | 496 +++++++++++++++ sdk/python/tests/utils/http_server.py | 13 + ...logged_features.py => test_log_creator.py} | 60 +- sdk/python/tests/utils/test_wrappers.py | 14 + 47 files changed, 3027 insertions(+), 3021 deletions(-) rename sdk/python/tests/integration/offline_store/{test_push_offline.py => test_push_features_to_offline_store.py} 
(100%) rename sdk/python/tests/integration/online_store/{test_push_online.py => test_push_features_to_online_store.py} (100%) delete mode 100644 sdk/python/tests/integration/registration/test_cli.py delete mode 100644 sdk/python/tests/integration/registration/test_feature_service_apply.py delete mode 100644 sdk/python/tests/integration/registration/test_stream_feature_view_apply.py create mode 100644 sdk/python/tests/integration/registration/test_universal_cli.py delete mode 100644 sdk/python/tests/integration/scaffolding/test_partial_apply.py create mode 100644 sdk/python/tests/unit/cli/test_cli.py rename sdk/python/tests/{integration/registration => unit/cli}/test_cli_apply_duplicates.py (98%) rename sdk/python/tests/{integration/registration => unit/cli}/test_cli_chdir.py (97%) rename sdk/python/tests/{integration => unit/infra}/scaffolding/test_repo_config.py (100%) rename sdk/python/tests/{integration => unit/infra}/scaffolding/test_repo_operations.py (100%) create mode 100644 sdk/python/tests/unit/infra/test_inference_unit_tests.py create mode 100644 sdk/python/tests/unit/infra/test_local_registry.py rename sdk/python/tests/{integration/online_store => unit/local_feast_tests}/test_e2e_local.py (53%) rename sdk/python/tests/{integration/online_store => unit/local_feast_tests}/test_feature_service_read.py (78%) rename sdk/python/tests/{integration/scaffolding => unit/local_feast_tests}/test_init.py (97%) create mode 100644 sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py create mode 100644 sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py rename sdk/python/tests/utils/{online_read_write_test.py => basic_read_write_test.py} (100%) rename sdk/python/tests/utils/{cli_utils.py => cli_repo_creator.py} (100%) rename sdk/python/tests/utils/{data_source_utils.py => data_source_test_creator.py} (100%) rename sdk/python/tests/utils/{online_store_utils.py => dynamo_table_creator.py} (89%) create mode 100644 
sdk/python/tests/utils/e2e_test_validation.py create mode 100644 sdk/python/tests/utils/feature_records.py create mode 100644 sdk/python/tests/utils/http_server.py rename sdk/python/tests/utils/{logged_features.py => test_log_creator.py} (60%) create mode 100644 sdk/python/tests/utils/test_wrappers.py diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 06b77f13b3..b4bcccd9c6 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -14,8 +14,6 @@ import logging import multiprocessing import os -import socket -from contextlib import closing from datetime import datetime, timedelta from multiprocessing import Process from sys import platform @@ -45,6 +43,7 @@ from tests.integration.feature_repos.universal.data_sources.file import ( # noqa: E402 FileDataSourceCreator, ) +from tests.utils.http_server import check_port_open, free_port # noqa: E402 logger = logging.getLogger(__name__) @@ -327,7 +326,7 @@ def feature_server_endpoint(environment): yield environment.feature_store.get_feature_server_endpoint() return - port = _free_port() + port = free_port() proc = Process( target=start_test_local_server, @@ -340,7 +339,7 @@ def feature_server_endpoint(environment): proc.start() # Wait for server to start wait_retry_backoff( - lambda: (None, _check_port_open("localhost", port)), + lambda: (None, check_port_open("localhost", port)), timeout_secs=10, ) @@ -353,23 +352,12 @@ def feature_server_endpoint(environment): wait_retry_backoff( lambda: ( None, - not _check_port_open("localhost", environment.get_local_server_port()), + not check_port_open("localhost", environment.get_local_server_port()), ), timeout_secs=30, ) -def _check_port_open(host, port) -> bool: - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: - return sock.connect_ex((host, port)) == 0 - - -def _free_port(): - sock = socket.socket() - sock.bind(("", 0)) - return sock.getsockname()[1] - - @pytest.fixture def 
universal_data_sources(environment) -> TestData: return construct_universal_test_data(environment) diff --git a/sdk/python/tests/integration/e2e/test_go_feature_server.py b/sdk/python/tests/integration/e2e/test_go_feature_server.py index dc83246c2d..0f972e45df 100644 --- a/sdk/python/tests/integration/e2e/test_go_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_go_feature_server.py @@ -1,7 +1,5 @@ -import socket import threading import time -from contextlib import closing from datetime import datetime from typing import List @@ -11,10 +9,10 @@ import pytz import requests -from feast import FeatureService, FeatureView, ValueType from feast.embedded_go.online_features_service import EmbeddedOnlineFeatureServer from feast.feast_object import FeastObject from feast.feature_logging import LoggingConfig +from feast.feature_service import FeatureService from feast.infra.feature_servers.base_config import FeatureLoggingConfig from feast.protos.feast.serving.ServingService_pb2 import ( FieldStatus, @@ -24,6 +22,7 @@ from feast.protos.feast.serving.ServingService_pb2_grpc import ServingServiceStub from feast.protos.feast.types.Value_pb2 import RepeatedValue from feast.type_map import python_values_to_proto_values +from feast.value_type import ValueType from feast.wait import wait_retry_backoff from tests.integration.feature_repos.repo_configuration import ( construct_universal_feature_views, @@ -33,94 +32,8 @@ driver, location, ) - - -@pytest.fixture -def initialized_registry(environment, universal_data_sources): - fs = environment.feature_store - - _, _, data_sources = universal_data_sources - feature_views = construct_universal_feature_views(data_sources) - - feature_service = FeatureService( - name="driver_features", - features=[feature_views.driver], - logging_config=LoggingConfig( - destination=environment.data_source_creator.create_logged_features_destination(), - sample_rate=1.0, - ), - ) - feast_objects: List[FeastObject] = [feature_service] - 
feast_objects.extend(feature_views.values()) - feast_objects.extend([driver(), customer(), location()]) - - fs.apply(feast_objects) - fs.materialize(environment.start_date, environment.end_date) - - -def server_port(environment, server_type: str): - if not environment.test_repo_config.go_feature_serving: - pytest.skip("Only for Go path") - - fs = environment.feature_store - - embedded = EmbeddedOnlineFeatureServer( - repo_path=str(fs.repo_path.absolute()), - repo_config=fs.config, - feature_store=fs, - ) - port = free_port() - if server_type == "grpc": - target = embedded.start_grpc_server - elif server_type == "http": - target = embedded.start_http_server - else: - raise ValueError("Server Type must be either 'http' or 'grpc'") - - t = threading.Thread( - target=target, - args=("127.0.0.1", port), - kwargs=dict( - enable_logging=True, - logging_options=FeatureLoggingConfig( - enabled=True, - queue_capacity=100, - write_to_disk_interval_secs=1, - flush_interval_secs=1, - emit_timeout_micro_secs=10000, - ), - ), - ) - t.start() - - wait_retry_backoff( - lambda: (None, check_port_open("127.0.0.1", port)), timeout_secs=15 - ) - - yield port - if server_type == "grpc": - embedded.stop_grpc_server() - else: - embedded.stop_http_server() - - # wait for graceful stop - time.sleep(5) - - -@pytest.fixture -def grpc_server_port(environment, initialized_registry): - yield from server_port(environment, "grpc") - - -@pytest.fixture -def http_server_port(environment, initialized_registry): - yield from server_port(environment, "http") - - -@pytest.fixture -def grpc_client(grpc_server_port): - ch = grpc.insecure_channel(f"localhost:{grpc_server_port}") - yield ServingServiceStub(ch) +from tests.utils.http_server import check_port_open, free_port +from tests.utils.test_log_creator import generate_expected_logs, get_latest_rows @pytest.mark.integration @@ -254,43 +167,97 @@ def retrieve(): pd.testing.assert_frame_equal(expected_logs, persisted_logs, check_dtype=False) -def 
free_port(): - sock = socket.socket() - sock.bind(("", 0)) - return sock.getsockname()[1] +""" +Start go feature server either on http or grpc based on the repo configuration for testing. +""" -def check_port_open(host, port) -> bool: - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: - return sock.connect_ex((host, port)) == 0 +def _server_port(environment, server_type: str): + if not environment.test_repo_config.go_feature_serving: + pytest.skip("Only for Go path") + fs = environment.feature_store -def get_latest_rows(df, join_key, entity_values): - rows = df[df[join_key].isin(entity_values)] - return rows.loc[rows.groupby(join_key)["event_timestamp"].idxmax()] + embedded = EmbeddedOnlineFeatureServer( + repo_path=str(fs.repo_path.absolute()), + repo_config=fs.config, + feature_store=fs, + ) + port = free_port() + if server_type == "grpc": + target = embedded.start_grpc_server + elif server_type == "http": + target = embedded.start_http_server + else: + raise ValueError("Server Type must be either 'http' or 'grpc'") + + t = threading.Thread( + target=target, + args=("127.0.0.1", port), + kwargs=dict( + enable_logging=True, + logging_options=FeatureLoggingConfig( + enabled=True, + queue_capacity=100, + write_to_disk_interval_secs=1, + flush_interval_secs=1, + emit_timeout_micro_secs=10000, + ), + ), + ) + t.start() + wait_retry_backoff( + lambda: (None, check_port_open("127.0.0.1", port)), timeout_secs=15 + ) -def generate_expected_logs( - df: pd.DataFrame, - feature_view: FeatureView, - features: List[str], - join_keys: List[str], - timestamp_column: str, -): - logs = pd.DataFrame() - for join_key in join_keys: - logs[join_key] = df[join_key] - - for feature in features: - col = f"{feature_view.name}__{feature}" - logs[col] = df[feature] - logs[f"{col}__timestamp"] = df[timestamp_column] - logs[f"{col}__status"] = FieldStatus.PRESENT - if feature_view.ttl: - logs[f"{col}__status"] = logs[f"{col}__status"].mask( - df[timestamp_column] - < 
datetime.utcnow().replace(tzinfo=pytz.UTC) - feature_view.ttl, - FieldStatus.OUTSIDE_MAX_AGE, - ) + yield port + if server_type == "grpc": + embedded.stop_grpc_server() + else: + embedded.stop_http_server() - return logs.sort_values(by=join_keys).reset_index(drop=True) + # wait for graceful stop + time.sleep(5) + + +# Go test fixtures + + +@pytest.fixture +def initialized_registry(environment, universal_data_sources): + fs = environment.feature_store + + _, _, data_sources = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + feature_service = FeatureService( + name="driver_features", + features=[feature_views.driver], + logging_config=LoggingConfig( + destination=environment.data_source_creator.create_logged_features_destination(), + sample_rate=1.0, + ), + ) + feast_objects: List[FeastObject] = [feature_service] + feast_objects.extend(feature_views.values()) + feast_objects.extend([driver(), customer(), location()]) + + fs.apply(feast_objects) + fs.materialize(environment.start_date, environment.end_date) + + +@pytest.fixture +def grpc_server_port(environment, initialized_registry): + yield from _server_port(environment, "grpc") + + +@pytest.fixture +def http_server_port(environment, initialized_registry): + yield from _server_port(environment, "http") + + +@pytest.fixture +def grpc_client(grpc_server_port): + ch = grpc.insecure_channel(f"localhost:{grpc_server_port}") + yield ServingServiceStub(ch) diff --git a/sdk/python/tests/integration/e2e/test_python_feature_server.py b/sdk/python/tests/integration/e2e/test_python_feature_server.py index 97b9693391..9c61f6fa19 100644 --- a/sdk/python/tests/integration/e2e/test_python_feature_server.py +++ b/sdk/python/tests/integration/e2e/test_python_feature_server.py @@ -58,7 +58,9 @@ def test_get_online_features(python_fs_client): @pytest.mark.integration @pytest.mark.universal_online_stores def test_push(python_fs_client): - initial_temp = get_temperatures(python_fs_client, 
location_ids=[1])[0] + initial_temp = _get_temperatures_from_feature_server( + python_fs_client, location_ids=[1] + )[0] json_data = json.dumps( { "push_source_name": "location_stats_push_source", @@ -77,10 +79,12 @@ def test_push(python_fs_client): # Check new pushed temperature is fetched assert response.status_code == 200 - assert get_temperatures(python_fs_client, location_ids=[1]) == [initial_temp * 100] + assert _get_temperatures_from_feature_server( + python_fs_client, location_ids=[1] + ) == [initial_temp * 100] -def get_temperatures(client, location_ids: List[int]): +def _get_temperatures_from_feature_server(client, location_ids: List[int]): get_request_data = { "features": ["pushable_location_stats:temperature"], "entities": {"location_id": location_ids}, diff --git a/sdk/python/tests/integration/e2e/test_universal_e2e.py b/sdk/python/tests/integration/e2e/test_universal_e2e.py index a42a96e594..5dc0c042d9 100644 --- a/sdk/python/tests/integration/e2e/test_universal_e2e.py +++ b/sdk/python/tests/integration/e2e/test_universal_e2e.py @@ -1,14 +1,15 @@ -import math -from datetime import datetime, timedelta -from typing import Optional +from datetime import timedelta -import pandas as pd import pytest -from pytz import utc -from feast import FeatureStore, FeatureView +from feast import BigQuerySource, Entity, FeatureView, Field +from feast.feature_service import FeatureService +from feast.types import Float32, String from tests.integration.feature_repos.universal.entities import driver from tests.integration.feature_repos.universal.feature_views import driver_feature_view +from tests.utils.basic_read_write_test import basic_rw_test +from tests.utils.cli_repo_creator import CliRunner, get_example_repo +from tests.utils.e2e_test_validation import validate_offline_online_store_consistency @pytest.mark.integration @@ -30,133 +31,69 @@ def test_e2e_consistency(environment, e2e_data_sources, infer_features): # we use timestamp from generated dataframe as a split 
point split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) - run_offline_online_store_consistency_test(fs, fv, split_dt) - - -def check_offline_and_online_features( - fs: FeatureStore, - fv: FeatureView, - driver_id: int, - event_timestamp: datetime, - expected_value: Optional[float], - full_feature_names: bool, - check_offline_store: bool = True, -) -> None: - # Check online store - response_dict = fs.get_online_features( - [f"{fv.name}:value"], - [{"driver_id": driver_id}], - full_feature_names=full_feature_names, - ).to_dict() - - if full_feature_names: - - if expected_value: - assert response_dict[f"{fv.name}__value"][0], f"Response: {response_dict}" - assert ( - abs(response_dict[f"{fv.name}__value"][0] - expected_value) < 1e-6 - ), f"Response: {response_dict}, Expected: {expected_value}" - else: - assert response_dict[f"{fv.name}__value"][0] is None - else: - if expected_value: - assert response_dict["value"][0], f"Response: {response_dict}" - assert ( - abs(response_dict["value"][0] - expected_value) < 1e-6 - ), f"Response: {response_dict}, Expected: {expected_value}" - else: - assert response_dict["value"][0] is None - - # Check offline store - if check_offline_store: - df = fs.get_historical_features( - entity_df=pd.DataFrame.from_dict( - {"driver_id": [driver_id], "event_timestamp": [event_timestamp]} - ), - features=[f"{fv.name}:value"], - full_feature_names=full_feature_names, - ).to_df() - - if full_feature_names: - if expected_value: - assert ( - abs( - df.to_dict(orient="list")[f"{fv.name}__value"][0] - - expected_value - ) - < 1e-6 - ) - else: - assert not df.to_dict(orient="list")[f"{fv.name}__value"] or math.isnan( - df.to_dict(orient="list")[f"{fv.name}__value"][0] - ) - else: - if expected_value: - assert ( - abs(df.to_dict(orient="list")["value"][0] - expected_value) < 1e-6 - ) - else: - assert not df.to_dict(orient="list")["value"] or math.isnan( - df.to_dict(orient="list")["value"][0] - ) - - -def 
run_offline_online_store_consistency_test( - fs: FeatureStore, fv: FeatureView, split_dt: datetime -) -> None: - now = datetime.utcnow() - - full_feature_names = True - check_offline_store: bool = True - - # Run materialize() - # use both tz-naive & tz-aware timestamps to test that they're both correctly handled - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) - end_date = split_dt - fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) - - # check result of materialize() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=1, - event_timestamp=end_date, - expected_value=0.3, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) + validate_offline_online_store_consistency(fs, fv, split_dt) - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=2, - event_timestamp=end_date, - expected_value=None, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - # check prior value for materialize_incremental() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=3, - event_timestamp=end_date, - expected_value=4, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) +@pytest.mark.integration +def test_partial() -> None: + """ + Add another table to existing repo using partial apply API. Make sure both the table + applied via CLI apply and the new table are passing RW test. 
+ """ + + runner = CliRunner() + with runner.local_repo( + get_example_repo("example_feature_repo_1.py"), "bigquery" + ) as store: + driver = Entity(name="driver", join_keys=["test"]) + + driver_locations_source = BigQuerySource( + table="feast-oss.public.drivers", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + ) + + driver_locations_100 = FeatureView( + name="driver_locations_100", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="lat", dtype=Float32), + Field(name="lon", dtype=String), + Field(name="name", dtype=String), + Field(name="test", dtype=String), + ], + online=True, + batch_source=driver_locations_source, + tags={}, + ) + + store.apply([driver_locations_100]) + + basic_rw_test(store, view_name="driver_locations") + basic_rw_test(store, view_name="driver_locations_100") - # run materialize_incremental() - fs.materialize_incremental(feature_views=[fv.name], end_date=now) - - # check result of materialize_incremental() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=3, - event_timestamp=now, - expected_value=5, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) + +@pytest.mark.integration +def test_read_pre_applied() -> None: + """ + Read feature values from the FeatureStore using a FeatureService. 
+ """ + runner = CliRunner() + with runner.local_repo( + get_example_repo("example_feature_repo_1.py"), "bigquery" + ) as store: + + assert len(store.list_feature_services()) == 1 + fs = store.get_feature_service("driver_locations_service") + assert len(fs.tags) == 1 + assert fs.tags["release"] == "production" + + fv = store.get_feature_view("driver_locations") + + fs = FeatureService(name="new_feature_service", features=[fv[["lon"]]]) + + store.apply([fs]) + + assert len(store.list_feature_services()) == 2 + store.get_feature_service("new_feature_service") diff --git a/sdk/python/tests/integration/e2e/test_usage_e2e.py b/sdk/python/tests/integration/e2e/test_usage_e2e.py index 53e4a32a82..5c95bd50b1 100644 --- a/sdk/python/tests/integration/e2e/test_usage_e2e.py +++ b/sdk/python/tests/integration/e2e/test_usage_e2e.py @@ -11,6 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +# This file tests our usage tracking system in `usage.py`. 
import os import sys import tempfile diff --git a/sdk/python/tests/integration/e2e/test_validation.py b/sdk/python/tests/integration/e2e/test_validation.py index b8908663b3..7062948f53 100644 --- a/sdk/python/tests/integration/e2e/test_validation.py +++ b/sdk/python/tests/integration/e2e/test_validation.py @@ -26,8 +26,8 @@ driver, location, ) -from tests.utils.cli_utils import CliRunner -from tests.utils.logged_features import prepare_logs +from tests.utils.cli_repo_creator import CliRunner +from tests.utils.test_log_creator import prepare_logs _features = [ "customer_profile:current_balance", @@ -39,72 +39,6 @@ ] -@ge_profiler -def configurable_profiler(dataset: PandasDataset) -> ExpectationSuite: - from great_expectations.profile.user_configurable_profiler import ( - UserConfigurableProfiler, - ) - - return UserConfigurableProfiler( - profile_dataset=dataset, - ignored_columns=["event_timestamp"], - excluded_expectations=[ - "expect_table_columns_to_match_ordered_list", - "expect_table_row_count_to_be_between", - ], - value_set_threshold="few", - ).build_suite() - - -@ge_profiler(with_feature_metadata=True) -def profiler_with_feature_metadata(dataset: PandasDataset) -> ExpectationSuite: - from great_expectations.profile.user_configurable_profiler import ( - UserConfigurableProfiler, - ) - - # always present - dataset.expect_column_values_to_be_in_set( - "global_stats__avg_ride_length__status", {FieldStatus.PRESENT} - ) - - # present at least in 70% of rows - dataset.expect_column_values_to_be_in_set( - "customer_profile__current_balance__status", {FieldStatus.PRESENT}, mostly=0.7 - ) - - return UserConfigurableProfiler( - profile_dataset=dataset, - ignored_columns=["event_timestamp"] - + [ - c - for c in dataset.columns - if c.endswith("__timestamp") or c.endswith("__status") - ], - excluded_expectations=[ - "expect_table_columns_to_match_ordered_list", - "expect_table_row_count_to_be_between", - ], - value_set_threshold="few", - ).build_suite() - - 
-@ge_profiler -def profiler_with_unrealistic_expectations(dataset: PandasDataset) -> ExpectationSuite: - # need to create dataframe with corrupted data first - df = pd.DataFrame() - df["current_balance"] = [-100] - df["avg_passenger_count"] = [0] - - other_ds = PandasDataset(df) - other_ds.expect_column_max_to_be_between("current_balance", -1000, -100) - other_ds.expect_column_values_to_be_in_set("avg_passenger_count", value_set={0}) - - # this should pass - other_ds.expect_column_min_to_be_between("avg_passenger_count", 0, 1000) - - return other_ds.get_expectation_suite() - - @pytest.mark.integration @pytest.mark.universal_offline_stores def test_historical_retrieval_with_validation(environment, universal_data_sources): @@ -357,3 +291,72 @@ def test_e2e_validation_via_cli(environment, universal_data_sources): p = runner.run(validate_args, cwd=local_repo.repo_path) assert p.returncode == 1, p.stdout.decode() assert "Validation failed" in p.stdout.decode(), p.stderr.decode() + + +# Great expectations profilers created for testing + + +@ge_profiler +def configurable_profiler(dataset: PandasDataset) -> ExpectationSuite: + from great_expectations.profile.user_configurable_profiler import ( + UserConfigurableProfiler, + ) + + return UserConfigurableProfiler( + profile_dataset=dataset, + ignored_columns=["event_timestamp"], + excluded_expectations=[ + "expect_table_columns_to_match_ordered_list", + "expect_table_row_count_to_be_between", + ], + value_set_threshold="few", + ).build_suite() + + +@ge_profiler(with_feature_metadata=True) +def profiler_with_feature_metadata(dataset: PandasDataset) -> ExpectationSuite: + from great_expectations.profile.user_configurable_profiler import ( + UserConfigurableProfiler, + ) + + # always present + dataset.expect_column_values_to_be_in_set( + "global_stats__avg_ride_length__status", {FieldStatus.PRESENT} + ) + + # present at least in 70% of rows + dataset.expect_column_values_to_be_in_set( + 
"customer_profile__current_balance__status", {FieldStatus.PRESENT}, mostly=0.7 + ) + + return UserConfigurableProfiler( + profile_dataset=dataset, + ignored_columns=["event_timestamp"] + + [ + c + for c in dataset.columns + if c.endswith("__timestamp") or c.endswith("__status") + ], + excluded_expectations=[ + "expect_table_columns_to_match_ordered_list", + "expect_table_row_count_to_be_between", + ], + value_set_threshold="few", + ).build_suite() + + +@ge_profiler +def profiler_with_unrealistic_expectations(dataset: PandasDataset) -> ExpectationSuite: + # need to create dataframe with corrupted data first + df = pd.DataFrame() + df["current_balance"] = [-100] + df["avg_passenger_count"] = [0] + + other_ds = PandasDataset(df) + other_ds.expect_column_max_to_be_between("current_balance", -1000, -100) + other_ds.expect_column_values_to_be_in_set("avg_passenger_count", value_set={0}) + + # this should pass + other_ds.expect_column_min_to_be_between("avg_passenger_count", 0, 1000) + + return other_ds.get_expectation_suite() diff --git a/sdk/python/tests/integration/materialization/test_lambda.py b/sdk/python/tests/integration/materialization/test_lambda.py index 4a259fd365..8ffd31e0cd 100644 --- a/sdk/python/tests/integration/materialization/test_lambda.py +++ b/sdk/python/tests/integration/materialization/test_lambda.py @@ -1,13 +1,11 @@ -import math -import time -from datetime import datetime, timedelta -from typing import Optional +from datetime import timedelta -import pandas as pd import pytest -from pytz import utc -from feast import Entity, Feature, FeatureStore, FeatureView, ValueType +from feast.entity import Entity +from feast.feature import Feature +from feast.feature_view import FeatureView +from feast.types import ValueType from tests.data.data_creator import create_basic_driver_dataset from tests.integration.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, @@ -19,10 +17,11 @@ from 
tests.integration.feature_repos.universal.data_sources.redshift import ( RedshiftDataSourceCreator, ) +from tests.utils.e2e_test_validation import validate_offline_online_store_consistency @pytest.mark.integration -def test_lambda_materialization(): +def test_lambda_materialization_consistency(): lambda_config = IntegrationTestRepoConfig( provider="aws", online_store={"type": "dynamodb", "region": "us-west-2"}, @@ -70,137 +69,6 @@ def test_lambda_materialization(): print(f"Split datetime: {split_dt}") - run_offline_online_store_consistency_test(fs, driver_stats_fv, split_dt) + validate_offline_online_store_consistency(fs, driver_stats_fv, split_dt) finally: fs.teardown() - - -def check_offline_and_online_features( - fs: FeatureStore, - fv: FeatureView, - driver_id: int, - event_timestamp: datetime, - expected_value: Optional[float], - full_feature_names: bool, - check_offline_store: bool = True, -) -> None: - # Check online store - response_dict = fs.get_online_features( - [f"{fv.name}:value"], - [{"driver_id": driver_id}], - full_feature_names=full_feature_names, - ).to_dict() - - if full_feature_names: - - if expected_value: - assert response_dict[f"{fv.name}__value"][0], f"Response: {response_dict}" - assert ( - abs(response_dict[f"{fv.name}__value"][0] - expected_value) < 1e-6 - ), f"Response: {response_dict}, Expected: {expected_value}" - else: - assert response_dict[f"{fv.name}__value"][0] is None - else: - if expected_value: - assert response_dict["value"][0], f"Response: {response_dict}" - assert ( - abs(response_dict["value"][0] - expected_value) < 1e-6 - ), f"Response: {response_dict}, Expected: {expected_value}" - else: - assert response_dict["value"][0] is None - - # Check offline store - if check_offline_store: - df = fs.get_historical_features( - entity_df=pd.DataFrame.from_dict( - {"driver_id": [driver_id], "event_timestamp": [event_timestamp]} - ), - features=[f"{fv.name}:value"], - full_feature_names=full_feature_names, - ).to_df() - - if 
full_feature_names: - if expected_value: - assert ( - abs( - df.to_dict(orient="list")[f"{fv.name}__value"][0] - - expected_value - ) - < 1e-6 - ) - else: - assert not df.to_dict(orient="list")[f"{fv.name}__value"] or math.isnan( - df.to_dict(orient="list")[f"{fv.name}__value"][0] - ) - else: - if expected_value: - assert ( - abs(df.to_dict(orient="list")["value"][0] - expected_value) < 1e-6 - ) - else: - assert not df.to_dict(orient="list")["value"] or math.isnan( - df.to_dict(orient="list")["value"][0] - ) - - -def run_offline_online_store_consistency_test( - fs: FeatureStore, fv: FeatureView, split_dt: datetime -) -> None: - now = datetime.utcnow() - - full_feature_names = True - check_offline_store: bool = True - - # Run materialize() - # use both tz-naive & tz-aware timestamps to test that they're both correctly handled - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) - end_date = split_dt - fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) - - time.sleep(10) - - # check result of materialize() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=1, - event_timestamp=end_date, - expected_value=0.3, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=2, - event_timestamp=end_date, - expected_value=None, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - - # check prior value for materialize_incremental() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=3, - event_timestamp=end_date, - expected_value=4, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - - # run materialize_incremental() - fs.materialize_incremental(feature_views=[fv.name], end_date=now) - - # check result of materialize_incremental() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=3, - event_timestamp=now, - 
expected_value=5, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) diff --git a/sdk/python/tests/integration/offline_store/test_feature_logging.py b/sdk/python/tests/integration/offline_store/test_feature_logging.py index 5d74ee284c..eba994544d 100644 --- a/sdk/python/tests/integration/offline_store/test_feature_logging.py +++ b/sdk/python/tests/integration/offline_store/test_feature_logging.py @@ -22,7 +22,7 @@ location, ) from tests.integration.feature_repos.universal.feature_views import conv_rate_plus_100 -from tests.utils.logged_features import prepare_logs, to_logs_dataset +from tests.utils.test_log_creator import prepare_logs, to_logs_dataset @pytest.mark.integration diff --git a/sdk/python/tests/integration/offline_store/test_push_offline.py b/sdk/python/tests/integration/offline_store/test_push_features_to_offline_store.py similarity index 100% rename from sdk/python/tests/integration/offline_store/test_push_offline.py rename to sdk/python/tests/integration/offline_store/test_push_features_to_offline_store.py diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index 87bf59fe9f..718b7577d9 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -1,20 +1,13 @@ import random import time from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional import numpy as np import pandas as pd import pytest -from pandas.testing import assert_frame_equal as pd_assert_frame_equal -from pytz import utc -from feast import utils from feast.entity import Entity -from feast.errors import ( - FeatureNameCollisionError, - RequestDataNotFoundInEntityDfException, -) +from feast.errors import RequestDataNotFoundInEntityDfException from feast.feature_service import 
FeatureService from feast.feature_view import FeatureView from feast.field import Field @@ -34,263 +27,17 @@ driver, location, ) +from tests.utils.feature_records import ( + assert_feature_service_correctness, + assert_feature_service_entity_mapping_correctness, + get_expected_training_df, + get_response_feature_name, + validate_dataframes, +) np.random.seed(0) -def convert_timestamp_records_to_utc( - records: List[Dict[str, Any]], column: str -) -> List[Dict[str, Any]]: - for record in records: - record[column] = utils.make_tzaware(record[column]).astimezone(utc) - return records - - -# Find the latest record in the given time range and filter -def find_asof_record( - records: List[Dict[str, Any]], - ts_key: str, - ts_start: datetime, - ts_end: datetime, - filter_keys: Optional[List[str]] = None, - filter_values: Optional[List[Any]] = None, -) -> Dict[str, Any]: - filter_keys = filter_keys or [] - filter_values = filter_values or [] - assert len(filter_keys) == len(filter_values) - found_record: Dict[str, Any] = {} - for record in records: - if ( - all( - [ - record[filter_key] == filter_value - for filter_key, filter_value in zip(filter_keys, filter_values) - ] - ) - and ts_start <= record[ts_key] <= ts_end - ): - if not found_record or found_record[ts_key] < record[ts_key]: - found_record = record - return found_record - - -def get_expected_training_df( - customer_df: pd.DataFrame, - customer_fv: FeatureView, - driver_df: pd.DataFrame, - driver_fv: FeatureView, - orders_df: pd.DataFrame, - order_fv: FeatureView, - location_df: pd.DataFrame, - location_fv: FeatureView, - global_df: pd.DataFrame, - global_fv: FeatureView, - field_mapping_df: pd.DataFrame, - field_mapping_fv: FeatureView, - entity_df: pd.DataFrame, - event_timestamp: str, - full_feature_names: bool = False, -): - # Convert all pandas dataframes into records with UTC timestamps - customer_records = convert_timestamp_records_to_utc( - customer_df.to_dict("records"), 
customer_fv.batch_source.timestamp_field - ) - driver_records = convert_timestamp_records_to_utc( - driver_df.to_dict("records"), driver_fv.batch_source.timestamp_field - ) - order_records = convert_timestamp_records_to_utc( - orders_df.to_dict("records"), event_timestamp - ) - location_records = convert_timestamp_records_to_utc( - location_df.to_dict("records"), location_fv.batch_source.timestamp_field - ) - global_records = convert_timestamp_records_to_utc( - global_df.to_dict("records"), global_fv.batch_source.timestamp_field - ) - field_mapping_records = convert_timestamp_records_to_utc( - field_mapping_df.to_dict("records"), - field_mapping_fv.batch_source.timestamp_field, - ) - entity_rows = convert_timestamp_records_to_utc( - entity_df.to_dict("records"), event_timestamp - ) - - # Set sufficiently large ttl that it effectively functions as infinite for the calculations below. - default_ttl = timedelta(weeks=52) - - # Manually do point-in-time join of driver, customer, and order records against - # the entity df - for entity_row in entity_rows: - customer_record = find_asof_record( - customer_records, - ts_key=customer_fv.batch_source.timestamp_field, - ts_start=entity_row[event_timestamp] - - get_feature_view_ttl(customer_fv, default_ttl), - ts_end=entity_row[event_timestamp], - filter_keys=["customer_id"], - filter_values=[entity_row["customer_id"]], - ) - driver_record = find_asof_record( - driver_records, - ts_key=driver_fv.batch_source.timestamp_field, - ts_start=entity_row[event_timestamp] - - get_feature_view_ttl(driver_fv, default_ttl), - ts_end=entity_row[event_timestamp], - filter_keys=["driver_id"], - filter_values=[entity_row["driver_id"]], - ) - order_record = find_asof_record( - order_records, - ts_key=customer_fv.batch_source.timestamp_field, - ts_start=entity_row[event_timestamp] - - get_feature_view_ttl(order_fv, default_ttl), - ts_end=entity_row[event_timestamp], - filter_keys=["customer_id", "driver_id"], - 
filter_values=[entity_row["customer_id"], entity_row["driver_id"]], - ) - origin_record = find_asof_record( - location_records, - ts_key=location_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - - get_feature_view_ttl(location_fv, default_ttl), - ts_end=order_record[event_timestamp], - filter_keys=["location_id"], - filter_values=[order_record["origin_id"]], - ) - destination_record = find_asof_record( - location_records, - ts_key=location_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - - get_feature_view_ttl(location_fv, default_ttl), - ts_end=order_record[event_timestamp], - filter_keys=["location_id"], - filter_values=[order_record["destination_id"]], - ) - global_record = find_asof_record( - global_records, - ts_key=global_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - - get_feature_view_ttl(global_fv, default_ttl), - ts_end=order_record[event_timestamp], - ) - - field_mapping_record = find_asof_record( - field_mapping_records, - ts_key=field_mapping_fv.batch_source.timestamp_field, - ts_start=order_record[event_timestamp] - - get_feature_view_ttl(field_mapping_fv, default_ttl), - ts_end=order_record[event_timestamp], - ) - - entity_row.update( - { - ( - f"customer_profile__{k}" if full_feature_names else k - ): customer_record.get(k, None) - for k in ( - "current_balance", - "avg_passenger_count", - "lifetime_trip_count", - ) - } - ) - entity_row.update( - { - (f"driver_stats__{k}" if full_feature_names else k): driver_record.get( - k, None - ) - for k in ("conv_rate", "avg_daily_trips") - } - ) - entity_row.update( - { - (f"order__{k}" if full_feature_names else k): order_record.get(k, None) - for k in ("order_is_success",) - } - ) - entity_row.update( - { - "origin__temperature": origin_record.get("temperature", None), - "destination__temperature": destination_record.get("temperature", None), - } - ) - entity_row.update( - { - (f"global_stats__{k}" if full_feature_names else 
k): global_record.get( - k, None - ) - for k in ( - "num_rides", - "avg_ride_length", - ) - } - ) - - # get field_mapping_record by column name, but label by feature name - entity_row.update( - { - ( - f"field_mapping__{feature}" if full_feature_names else feature - ): field_mapping_record.get(column, None) - for ( - column, - feature, - ) in field_mapping_fv.batch_source.field_mapping.items() - } - ) - - # Convert records back to pandas dataframe - expected_df = pd.DataFrame(entity_rows) - - # Move "event_timestamp" column to front - current_cols = expected_df.columns.tolist() - current_cols.remove(event_timestamp) - expected_df = expected_df[[event_timestamp] + current_cols] - - # Cast some columns to expected types, since we lose information when converting pandas DFs into Python objects. - if full_feature_names: - expected_column_types = { - "order__order_is_success": "int32", - "driver_stats__conv_rate": "float32", - "customer_profile__current_balance": "float32", - "customer_profile__avg_passenger_count": "float32", - "global_stats__avg_ride_length": "float32", - "field_mapping__feature_name": "int32", - } - else: - expected_column_types = { - "order_is_success": "int32", - "conv_rate": "float32", - "current_balance": "float32", - "avg_passenger_count": "float32", - "avg_ride_length": "float32", - "feature_name": "int32", - } - - for col, typ in expected_column_types.items(): - expected_df[col] = expected_df[col].astype(typ) - - conv_feature_name = "driver_stats__conv_rate" if full_feature_names else "conv_rate" - conv_plus_feature_name = response_feature_name( - "conv_rate_plus_100", full_feature_names - ) - expected_df[conv_plus_feature_name] = expected_df[conv_feature_name] + 100 - expected_df[ - response_feature_name("conv_rate_plus_100_rounded", full_feature_names) - ] = ( - expected_df[conv_plus_feature_name] - .astype("float") - .round() - .astype(pd.Int32Dtype()) - ) - if "val_to_add" in expected_df.columns: - expected_df[ - 
response_feature_name("conv_rate_plus_val_to_add", full_feature_names) - ] = (expected_df[conv_feature_name] + expected_df["val_to_add"]) - - return expected_df - - @pytest.mark.integration @pytest.mark.universal_offline_stores @pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: f"full:{v}") @@ -396,7 +143,7 @@ def test_historical_features(environment, universal_data_sources, full_feature_n print(str(f"Time to execute job_from_df.to_df() = '{(end_time - start_time)}'\n")) assert sorted(expected_df.columns) == sorted(actual_df_from_df_entities.columns) - assert_frame_equal( + validate_dataframes( expected_df, actual_df_from_df_entities, keys=[event_timestamp, "order_id", "driver_id", "customer_id"], @@ -420,7 +167,7 @@ def test_historical_features(environment, universal_data_sources, full_feature_n ) table_from_df_entities: pd.DataFrame = job_from_df.to_arrow().to_pandas() - assert_frame_equal( + validate_dataframes( expected_df, table_from_df_entities, keys=[event_timestamp, "order_id", "driver_id", "customer_id"], @@ -570,15 +317,15 @@ def test_historical_features_with_entities_from_query( # Not requesting the on demand transform with an entity_df query (can't add request data in them) expected_df_query = full_expected_df.drop( columns=[ - response_feature_name("conv_rate_plus_100", full_feature_names), - response_feature_name("conv_rate_plus_100_rounded", full_feature_names), - response_feature_name("avg_daily_trips", full_feature_names), - response_feature_name("conv_rate", full_feature_names), + get_response_feature_name("conv_rate_plus_100", full_feature_names), + get_response_feature_name("conv_rate_plus_100_rounded", full_feature_names), + get_response_feature_name("avg_daily_trips", full_feature_names), + get_response_feature_name("conv_rate", full_feature_names), "origin__temperature", "destination__temperature", ] ) - assert_frame_equal( + validate_dataframes( expected_df_query, actual_df_from_sql_entities, keys=[event_timestamp, 
"order_id", "driver_id", "customer_id"], @@ -590,7 +337,7 @@ def test_historical_features_with_entities_from_query( table_from_sql_entities[col].dtype ) - assert_frame_equal( + validate_dataframes( expected_df_query, table_from_sql_entities, keys=[event_timestamp, "order_id", "driver_id", "customer_id"], @@ -654,22 +401,22 @@ def test_historical_features_persisting( full_feature_names, ).drop( columns=[ - response_feature_name("conv_rate_plus_100", full_feature_names), - response_feature_name("conv_rate_plus_100_rounded", full_feature_names), - response_feature_name("avg_daily_trips", full_feature_names), - response_feature_name("conv_rate", full_feature_names), + get_response_feature_name("conv_rate_plus_100", full_feature_names), + get_response_feature_name("conv_rate_plus_100_rounded", full_feature_names), + get_response_feature_name("avg_daily_trips", full_feature_names), + get_response_feature_name("conv_rate", full_feature_names), "origin__temperature", "destination__temperature", ] ) - assert_frame_equal( + validate_dataframes( expected_df, saved_dataset.to_df(), keys=[event_timestamp, "driver_id", "customer_id"], ) - assert_frame_equal( + validate_dataframes( job.to_df(), saved_dataset.to_df(), keys=[event_timestamp, "driver_id", "customer_id"], @@ -732,16 +479,16 @@ def test_historical_features_with_no_ttl( full_feature_names, ).drop( columns=[ - response_feature_name("conv_rate_plus_100", full_feature_names), - response_feature_name("conv_rate_plus_100_rounded", full_feature_names), - response_feature_name("avg_daily_trips", full_feature_names), - response_feature_name("conv_rate", full_feature_names), + get_response_feature_name("conv_rate_plus_100", full_feature_names), + get_response_feature_name("conv_rate_plus_100_rounded", full_feature_names), + get_response_feature_name("avg_daily_trips", full_feature_names), + get_response_feature_name("conv_rate", full_feature_names), "origin__temperature", "destination__temperature", ] ) - assert_frame_equal( + 
validate_dataframes( expected_df, job.to_df(), keys=[event_timestamp, "driver_id", "customer_id"], @@ -842,139 +589,4 @@ def test_historical_features_from_bigquery_sources_containing_backfills(environm print(str(f"Time to execute job_from_df.to_df() = '{(end_time - start_time)}'\n")) assert sorted(expected_df.columns) == sorted(actual_df.columns) - assert_frame_equal(expected_df, actual_df, keys=["driver_id"]) - - -def response_feature_name(feature: str, full_feature_names: bool) -> str: - if feature in {"conv_rate", "avg_daily_trips"} and full_feature_names: - return f"driver_stats__{feature}" - - if ( - feature - in { - "conv_rate_plus_100", - "conv_rate_plus_100_rounded", - "conv_rate_plus_val_to_add", - } - and full_feature_names - ): - return f"conv_rate_plus_100__{feature}" - - return feature - - -def get_feature_view_ttl( - feature_view: FeatureView, default_ttl: timedelta -) -> timedelta: - """Returns the ttl of a feature view if it is non-zero. Otherwise returns the specified default.""" - return feature_view.ttl if feature_view.ttl else default_ttl - - -def assert_feature_service_correctness( - store, feature_service, full_feature_names, entity_df, expected_df, event_timestamp -): - - job_from_df = store.get_historical_features( - entity_df=entity_df, - features=feature_service, - full_feature_names=full_feature_names, - ) - - actual_df_from_df_entities = job_from_df.to_df() - - expected_df = expected_df[ - [ - event_timestamp, - "order_id", - "driver_id", - "customer_id", - response_feature_name("conv_rate", full_feature_names), - response_feature_name("conv_rate_plus_100", full_feature_names), - "driver_age", - ] - ] - - assert_frame_equal( - expected_df, - actual_df_from_df_entities, - keys=[event_timestamp, "order_id", "driver_id", "customer_id"], - ) - - -def assert_feature_service_entity_mapping_correctness( - store, feature_service, full_feature_names, entity_df, expected_df, event_timestamp -): - if full_feature_names: - job_from_df = 
store.get_historical_features( - entity_df=entity_df, - features=feature_service, - full_feature_names=full_feature_names, - ) - actual_df_from_df_entities = job_from_df.to_df() - - expected_df: pd.DataFrame = ( - expected_df.sort_values( - by=[ - event_timestamp, - "order_id", - "driver_id", - "customer_id", - "origin_id", - "destination_id", - ] - ) - .drop_duplicates() - .reset_index(drop=True) - ) - expected_df = expected_df[ - [ - event_timestamp, - "order_id", - "driver_id", - "customer_id", - "origin_id", - "destination_id", - "origin__temperature", - "destination__temperature", - ] - ] - - assert_frame_equal( - expected_df, - actual_df_from_df_entities, - keys=[ - event_timestamp, - "order_id", - "driver_id", - "customer_id", - "origin_id", - "destination_id", - ], - ) - else: - # using 2 of the same FeatureView without full_feature_names=True will result in collision - with pytest.raises(FeatureNameCollisionError): - job_from_df = store.get_historical_features( - entity_df=entity_df, - features=feature_service, - full_feature_names=full_feature_names, - ) - - -def assert_frame_equal(expected_df, actual_df, keys): - expected_df: pd.DataFrame = ( - expected_df.sort_values(by=keys).drop_duplicates().reset_index(drop=True) - ) - - actual_df = ( - actual_df[expected_df.columns] - .sort_values(by=keys) - .drop_duplicates() - .reset_index(drop=True) - ) - - pd_assert_frame_equal( - expected_df, - actual_df, - check_dtype=False, - ) + validate_dataframes(expected_df, actual_df, keys=["driver_id"]) diff --git a/sdk/python/tests/integration/online_store/test_online_retrieval.py b/sdk/python/tests/integration/online_store/test_online_retrieval.py index 9cf4d9a182..988af6e7e9 100644 --- a/sdk/python/tests/integration/online_store/test_online_retrieval.py +++ b/sdk/python/tests/integration/online_store/test_online_retrieval.py @@ -11,7 +11,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import 
Value as ValueProto from feast.repo_config import RegistryConfig -from tests.utils.cli_utils import CliRunner, get_example_repo +from tests.utils.cli_repo_creator import CliRunner, get_example_repo @pytest.mark.integration diff --git a/sdk/python/tests/integration/online_store/test_push_online.py b/sdk/python/tests/integration/online_store/test_push_features_to_online_store.py similarity index 100% rename from sdk/python/tests/integration/online_store/test_push_online.py rename to sdk/python/tests/integration/online_store/test_push_features_to_online_store.py diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 6521c9ed2f..738b00f7d7 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -34,7 +34,7 @@ create_driver_hourly_stats_feature_view, driver_feature_view, ) -from tests.utils.data_source_utils import prep_file_source +from tests.utils.data_source_test_creator import prep_file_source @pytest.mark.integration diff --git a/sdk/python/tests/integration/registration/test_cli.py b/sdk/python/tests/integration/registration/test_cli.py deleted file mode 100644 index 15e5cf09ee..0000000000 --- a/sdk/python/tests/integration/registration/test_cli.py +++ /dev/null @@ -1,365 +0,0 @@ -import os -import tempfile -import uuid -from contextlib import contextmanager -from pathlib import Path -from textwrap import dedent -from typing import List - -import pytest -import yaml -from assertpy import assertpy - -from feast import FeatureStore, RepoConfig -from tests.integration.feature_repos.integration_test_repo_config import ( - IntegrationTestRepoConfig, -) -from tests.integration.feature_repos.repo_configuration import Environment -from tests.integration.feature_repos.universal.data_source_creator import ( - DataSourceCreator, -) -from 
tests.integration.feature_repos.universal.data_sources.bigquery import ( - BigQueryDataSourceCreator, -) -from tests.integration.feature_repos.universal.data_sources.file import ( - FileDataSourceCreator, -) -from tests.integration.feature_repos.universal.data_sources.redshift import ( - RedshiftDataSourceCreator, -) -from tests.utils.cli_utils import CliRunner, get_example_repo -from tests.utils.online_read_write_test import basic_rw_test - - -@pytest.mark.integration -@pytest.mark.universal_offline_stores -def test_universal_cli(environment: Environment): - project = f"test_universal_cli_{str(uuid.uuid4()).replace('-', '')[:8]}" - runner = CliRunner() - - with tempfile.TemporaryDirectory() as repo_dir_name: - try: - repo_path = Path(repo_dir_name) - feature_store_yaml = make_feature_store_yaml( - project, environment.test_repo_config, repo_path - ) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text(dedent(feature_store_yaml)) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_1.py")) - result = runner.run(["apply"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - # Store registry contents, to be compared later. - fs = FeatureStore(repo_path=str(repo_path)) - registry_dict = fs.registry.to_dict(project=project) - # Save only the specs, not the metadata. 
- registry_specs = { - key: [fco["spec"] if "spec" in fco else fco for fco in value] - for key, value in registry_dict.items() - } - - # entity & feature view list commands should succeed - result = runner.run(["entities", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run(["feature-views", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run(["feature-services", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run(["data-sources", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - # entity & feature view describe commands should succeed when objects exist - result = runner.run(["entities", "describe", "driver"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run( - ["feature-views", "describe", "driver_locations"], cwd=repo_path - ) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run( - ["feature-services", "describe", "driver_locations_service"], - cwd=repo_path, - ) - assertpy.assert_that(result.returncode).is_equal_to(0) - assertpy.assert_that(fs.list_feature_views()).is_length(4) - result = runner.run( - ["data-sources", "describe", "customer_profile_source"], - cwd=repo_path, - ) - assertpy.assert_that(result.returncode).is_equal_to(0) - assertpy.assert_that(fs.list_data_sources()).is_length(4) - - # entity & feature view describe commands should fail when objects don't exist - result = runner.run(["entities", "describe", "foo"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(1) - result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(1) - result = runner.run(["feature-services", "describe", "foo"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(1) - result = runner.run(["data-sources", 
"describe", "foo"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(1) - - # Doing another apply should be a no op, and should not cause errors - result = runner.run(["apply"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - basic_rw_test( - FeatureStore(repo_path=str(repo_path), config=None), - view_name="driver_locations", - ) - - # Confirm that registry contents have not changed. - registry_dict = fs.registry.to_dict(project=project) - assertpy.assert_that(registry_specs).is_equal_to( - { - key: [fco["spec"] if "spec" in fco else fco for fco in value] - for key, value in registry_dict.items() - } - ) - - result = runner.run(["teardown"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - finally: - runner.run(["teardown"], cwd=repo_path) - - -def make_feature_store_yaml(project, test_repo_config, repo_dir_name: Path): - offline_creator: DataSourceCreator = test_repo_config.offline_store_creator(project) - - offline_store_config = offline_creator.create_offline_store_config() - online_store = test_repo_config.online_store - - config = RepoConfig( - registry=str(Path(repo_dir_name) / "registry.db"), - project=project, - provider=test_repo_config.provider, - offline_store=offline_store_config, - online_store=online_store, - repo_path=str(Path(repo_dir_name)), - ) - config_dict = config.dict() - if ( - isinstance(config_dict["online_store"], dict) - and "redis_type" in config_dict["online_store"] - ): - if str(config_dict["online_store"]["redis_type"]) == "RedisType.redis_cluster": - config_dict["online_store"]["redis_type"] = "redis_cluster" - elif str(config_dict["online_store"]["redis_type"]) == "RedisType.redis": - config_dict["online_store"]["redis_type"] = "redis" - config_dict["repo_path"] = str(config_dict["repo_path"]) - return yaml.safe_dump(config_dict) - - -NULLABLE_ONLINE_STORE_CONFIGS: List[IntegrationTestRepoConfig] = [ - IntegrationTestRepoConfig( - provider="local", - 
offline_store_creator=FileDataSourceCreator, - online_store=None, - ), -] - -if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": - NULLABLE_ONLINE_STORE_CONFIGS.extend( - [ - IntegrationTestRepoConfig( - provider="gcp", - offline_store_creator=BigQueryDataSourceCreator, - online_store=None, - ), - IntegrationTestRepoConfig( - provider="aws", - offline_store_creator=RedshiftDataSourceCreator, - online_store=None, - ), - ] - ) - - -@pytest.mark.integration -@pytest.mark.parametrize("test_nullable_online_store", NULLABLE_ONLINE_STORE_CONFIGS) -def test_nullable_online_store(test_nullable_online_store) -> None: - project = f"test_nullable_online_store{str(uuid.uuid4()).replace('-', '')[:8]}" - runner = CliRunner() - - with tempfile.TemporaryDirectory() as repo_dir_name: - try: - repo_path = Path(repo_dir_name) - feature_store_yaml = make_feature_store_yaml( - project, test_nullable_online_store, repo_path - ) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text(dedent(feature_store_yaml)) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_1.py")) - result = runner.run(["apply"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - finally: - runner.run(["teardown"], cwd=repo_path) - - -@pytest.mark.integration -@pytest.mark.universal_offline_stores -def test_odfv_apply(environment) -> None: - project = f"test_odfv_apply{str(uuid.uuid4()).replace('-', '')[:8]}" - runner = CliRunner() - - with tempfile.TemporaryDirectory() as repo_dir_name: - try: - repo_path = Path(repo_dir_name) - feature_store_yaml = make_feature_store_yaml( - project, environment.test_repo_config, repo_path - ) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text(dedent(feature_store_yaml)) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("on_demand_feature_view_repo.py")) - result = runner.run(["apply"], cwd=repo_path) - 
assertpy.assert_that(result.returncode).is_equal_to(0) - - # entity & feature view list commands should succeed - result = runner.run(["entities", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run(["on-demand-feature-views", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - finally: - runner.run(["teardown"], cwd=repo_path) - - -@contextmanager -def setup_third_party_provider_repo(provider_name: str): - with tempfile.TemporaryDirectory() as repo_dir_name: - - # Construct an example repo in a temporary dir - repo_path = Path(repo_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: foo - registry: data/registry.db - provider: {provider_name} - online_store: - path: data/online_store.db - type: sqlite - offline_store: - type: file - """ - ) - ) - - (repo_path / "foo").mkdir() - repo_example = repo_path / "foo/provider.py" - repo_example.write_text( - (Path(__file__).parents[2] / "foo_provider.py").read_text() - ) - - yield repo_path - - -@contextmanager -def setup_third_party_registry_store_repo(registry_store: str): - with tempfile.TemporaryDirectory() as repo_dir_name: - - # Construct an example repo in a temporary dir - repo_path = Path(repo_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: foo - registry: - registry_store_type: {registry_store} - path: foobar://foo.bar - provider: local - online_store: - path: data/online_store.db - type: sqlite - offline_store: - type: file - """ - ) - ) - - (repo_path / "foo").mkdir() - repo_example = repo_path / "foo/registry_store.py" - repo_example.write_text( - (Path(__file__).parents[2] / "foo_registry_store.py").read_text() - ) - - yield repo_path - - -def test_3rd_party_providers() -> None: - """ - Test running apply on third party providers - """ - runner = CliRunner() - # Check with incorrect built-in 
provider name (no dots) - with setup_third_party_provider_repo("feast123") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains(b"Provider 'feast123' is not implemented") - # Check with incorrect third-party provider name (with dots) - with setup_third_party_provider_repo("feast_foo.Provider") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains( - b"Could not import module 'feast_foo' while attempting to load class 'Provider'" - ) - # Check with incorrect third-party provider name (with dots) - with setup_third_party_provider_repo("foo.FooProvider") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains( - b"Could not import class 'FooProvider' from module 'foo'" - ) - # Check with correct third-party provider name - with setup_third_party_provider_repo("foo.provider.FooProvider") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(0) - - -def test_3rd_party_registry_store() -> None: - """ - Test running apply on third party registry stores - """ - runner = CliRunner() - # Check with incorrect built-in provider name (no dots) - with setup_third_party_registry_store_repo("feast123") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains( - b'Registry store class name should end with "RegistryStore"' - ) - # Check with incorrect third-party registry store name (with dots) - with setup_third_party_registry_store_repo("feast_foo.RegistryStore") as repo_path: - return_code, output = 
runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains( - b"Could not import module 'feast_foo' while attempting to load class 'RegistryStore'" - ) - # Check with incorrect third-party registry store name (with dots) - with setup_third_party_registry_store_repo("foo.FooRegistryStore") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains( - b"Could not import class 'FooRegistryStore' from module 'foo'" - ) - # Check with correct third-party registry store name - with setup_third_party_registry_store_repo( - "foo.registry_store.FooRegistryStore" - ) as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(0) diff --git a/sdk/python/tests/integration/registration/test_feature_service_apply.py b/sdk/python/tests/integration/registration/test_feature_service_apply.py deleted file mode 100644 index 7824f6333e..0000000000 --- a/sdk/python/tests/integration/registration/test_feature_service_apply.py +++ /dev/null @@ -1,29 +0,0 @@ -import pytest - -from feast import FeatureService -from tests.utils.cli_utils import CliRunner, get_example_repo - - -@pytest.mark.integration -def test_read_pre_applied() -> None: - """ - Read feature values from the FeatureStore using a FeatureService. 
- """ - runner = CliRunner() - with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" - ) as store: - - assert len(store.list_feature_services()) == 1 - fs = store.get_feature_service("driver_locations_service") - assert len(fs.tags) == 1 - assert fs.tags["release"] == "production" - - fv = store.get_feature_view("driver_locations") - - fs = FeatureService(name="new_feature_service", features=[fv[["lon"]]]) - - store.apply([fs]) - - assert len(store.list_feature_services()) == 2 - store.get_feature_service("new_feature_service") diff --git a/sdk/python/tests/integration/registration/test_feature_store.py b/sdk/python/tests/integration/registration/test_feature_store.py index 12e9658649..7b95afadba 100644 --- a/sdk/python/tests/integration/registration/test_feature_store.py +++ b/sdk/python/tests/integration/registration/test_feature_store.py @@ -13,7 +13,7 @@ # limitations under the License. import os import time -from datetime import datetime, timedelta +from datetime import timedelta from tempfile import mkstemp import pytest @@ -30,96 +30,13 @@ from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig from feast.repo_config import RepoConfig from feast.types import Array, Bytes, Float64, Int64, String -from tests.utils.data_source_utils import ( +from tests.utils.data_source_test_creator import ( prep_file_source, simple_bq_source_using_query_arg, simple_bq_source_using_table_arg, ) -@pytest.fixture -def feature_store_with_local_registry(): - fd, registry_path = mkstemp() - fd, online_store_path = mkstemp() - return FeatureStore( - config=RepoConfig( - registry=registry_path, - project="default", - provider="local", - online_store=SqliteOnlineStoreConfig(path=online_store_path), - ) - ) - - -@pytest.fixture -def feature_store_with_gcs_registry(): - from google.cloud import storage - - storage_client = storage.Client() - bucket_name = f"feast-registry-test-{int(time.time() * 1000)}" - bucket = 
storage_client.bucket(bucket_name) - bucket = storage_client.create_bucket(bucket) - bucket.add_lifecycle_delete_rule( - age=14 - ) # delete buckets automatically after 14 days - bucket.patch() - bucket.blob("registry.db") - - return FeatureStore( - config=RepoConfig( - registry=f"gs://{bucket_name}/registry.db", - project="default", - provider="gcp", - ) - ) - - -@pytest.fixture -def feature_store_with_s3_registry(): - aws_registry_path = os.getenv( - "AWS_REGISTRY_PATH", "s3://feast-integration-tests/registries" - ) - return FeatureStore( - config=RepoConfig( - registry=f"{aws_registry_path}/{int(time.time() * 1000)}/registry.db", - project="default", - provider="aws", - online_store=DynamoDBOnlineStoreConfig( - region=os.getenv("AWS_REGION", "us-west-2") - ), - offline_store=FileOfflineStoreConfig(), - ) - ) - - -@pytest.mark.parametrize( - "test_feature_store", - [lazy_fixture("feature_store_with_local_registry")], -) -def test_apply_entity_success(test_feature_store): - entity = Entity( - name="driver_car_id", - description="Car driver id", - tags={"team": "matchmaking"}, - ) - - # Register Entity - test_feature_store.apply(entity) - - entities = test_feature_store.list_entities() - - entity = entities[0] - assert ( - len(entities) == 1 - and entity.name == "driver_car_id" - and entity.description == "Car driver id" - and "team" in entity.tags - and entity.tags["team"] == "matchmaking" - ) - - test_feature_store.teardown() - - @pytest.mark.integration @pytest.mark.parametrize( "test_feature_store", @@ -160,60 +77,6 @@ def test_apply_entity_integration(test_feature_store): test_feature_store.teardown() -@pytest.mark.parametrize( - "test_feature_store", - [lazy_fixture("feature_store_with_local_registry")], -) -def test_apply_feature_view_success(test_feature_store): - # Create Feature Views - batch_source = FileSource( - file_format=ParquetFormat(), - path="file://feast/*", - timestamp_field="ts_col", - created_timestamp_column="timestamp", - 
date_partition_column="date_partition_col", - ) - - entity = Entity(name="fs1_my_entity_1", join_keys=["entity_id"]) - - fv1 = FeatureView( - name="my_feature_view_1", - schema=[ - Field(name="fs1_my_feature_1", dtype=Int64), - Field(name="fs1_my_feature_2", dtype=String), - Field(name="fs1_my_feature_3", dtype=Array(String)), - Field(name="fs1_my_feature_4", dtype=Array(Bytes)), - Field(name="entity_id", dtype=Int64), - ], - entities=[entity], - tags={"team": "matchmaking"}, - batch_source=batch_source, - ttl=timedelta(minutes=5), - ) - - # Register Feature View - test_feature_store.apply([entity, fv1]) - - feature_views = test_feature_store.list_feature_views() - - # List Feature Views - assert ( - len(feature_views) == 1 - and feature_views[0].name == "my_feature_view_1" - and feature_views[0].features[0].name == "fs1_my_feature_1" - and feature_views[0].features[0].dtype == Int64 - and feature_views[0].features[1].name == "fs1_my_feature_2" - and feature_views[0].features[1].dtype == String - and feature_views[0].features[2].name == "fs1_my_feature_3" - and feature_views[0].features[2].dtype == Array(String) - and feature_views[0].features[3].name == "fs1_my_feature_4" - and feature_views[0].features[3].dtype == Array(Bytes) - and feature_views[0].entities[0] == "fs1_my_entity_1" - ) - - test_feature_store.teardown() - - @pytest.mark.integration @pytest.mark.parametrize( "test_feature_store", @@ -357,68 +220,8 @@ def test_apply_feature_view_integration(test_feature_store): test_feature_store.teardown() -@pytest.mark.parametrize( - "test_feature_store", - [lazy_fixture("feature_store_with_local_registry")], -) -def test_apply_object_and_read(test_feature_store): - assert isinstance(test_feature_store, FeatureStore) - # Create Feature Views - batch_source = FileSource( - file_format=ParquetFormat(), - path="file://feast/*", - timestamp_field="ts_col", - created_timestamp_column="timestamp", - ) - - e1 = Entity(name="fs1_my_entity_1", description="something") - - 
e2 = Entity(name="fs1_my_entity_2", description="something") - - fv1 = FeatureView( - name="my_feature_view_1", - schema=[ - Field(name="fs1_my_feature_1", dtype=Int64), - Field(name="fs1_my_feature_2", dtype=String), - Field(name="fs1_my_feature_3", dtype=Array(String)), - Field(name="fs1_my_feature_4", dtype=Array(Bytes)), - Field(name="fs1_my_entity_1", dtype=Int64), - ], - entities=[e1], - tags={"team": "matchmaking"}, - batch_source=batch_source, - ttl=timedelta(minutes=5), - ) - - fv2 = FeatureView( - name="my_feature_view_2", - schema=[ - Field(name="fs1_my_feature_1", dtype=Int64), - Field(name="fs1_my_feature_2", dtype=String), - Field(name="fs1_my_feature_3", dtype=Array(String)), - Field(name="fs1_my_feature_4", dtype=Array(Bytes)), - Field(name="fs1_my_entity_2", dtype=Int64), - ], - entities=[e2], - tags={"team": "matchmaking"}, - batch_source=batch_source, - ttl=timedelta(minutes=5), - ) - - # Register Feature View - test_feature_store.apply([fv1, e1, fv2, e2]) - - fv1_actual = test_feature_store.get_feature_view("my_feature_view_1") - e1_actual = test_feature_store.get_entity("fs1_my_entity_1") - - assert e1 == e1_actual - assert fv2 != fv1_actual - assert e2 != e1_actual - - test_feature_store.teardown() - - -def test_apply_remote_repo(): +@pytest.fixture +def feature_store_with_local_registry(): fd, registry_path = mkstemp() fd, online_store_path = mkstemp() return FeatureStore( @@ -431,94 +234,42 @@ def test_apply_remote_repo(): ) -@pytest.mark.parametrize( - "test_feature_store", - [lazy_fixture("feature_store_with_local_registry")], -) -@pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) -def test_reapply_feature_view_success(test_feature_store, dataframe_source): - with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source: - - e = Entity(name="id", join_keys=["id_join_key"]) - - # Create Feature View - fv1 = FeatureView( - name="my_feature_view_1", - schema=[Field(name="string_col", 
dtype=String)], - entities=[e], - batch_source=file_source, - ttl=timedelta(minutes=5), - ) - - # Register Feature View - test_feature_store.apply([fv1, e]) - - # Check Feature View - fv_stored = test_feature_store.get_feature_view(fv1.name) - assert len(fv_stored.materialization_intervals) == 0 - - # Run materialization - test_feature_store.materialize(datetime(2020, 1, 1), datetime(2021, 1, 1)) - - # Check Feature View - fv_stored = test_feature_store.get_feature_view(fv1.name) - assert len(fv_stored.materialization_intervals) == 1 - - # Apply again - test_feature_store.apply([fv1]) +@pytest.fixture +def feature_store_with_gcs_registry(): + from google.cloud import storage - # Check Feature View - fv_stored = test_feature_store.get_feature_view(fv1.name) - assert len(fv_stored.materialization_intervals) == 1 + storage_client = storage.Client() + bucket_name = f"feast-registry-test-{int(time.time() * 1000)}" + bucket = storage_client.bucket(bucket_name) + bucket = storage_client.create_bucket(bucket) + bucket.add_lifecycle_delete_rule( + age=14 + ) # delete buckets automatically after 14 days + bucket.patch() + bucket.blob("registry.db") - # Change and apply Feature View - fv1 = FeatureView( - name="my_feature_view_1", - schema=[Field(name="int64_col", dtype=Int64)], - entities=[e], - batch_source=file_source, - ttl=timedelta(minutes=5), + return FeatureStore( + config=RepoConfig( + registry=f"gs://{bucket_name}/registry.db", + project="default", + provider="gcp", ) - test_feature_store.apply([fv1]) - - # Check Feature View - fv_stored = test_feature_store.get_feature_view(fv1.name) - assert len(fv_stored.materialization_intervals) == 0 - - test_feature_store.teardown() - - -def test_apply_conflicting_featureview_names(feature_store_with_local_registry): - """Test applying feature views with non-case-insensitively unique names""" - driver = Entity(name="driver", join_keys=["driver_id"]) - customer = Entity(name="customer", join_keys=["customer_id"]) - - 
driver_stats = FeatureView( - name="driver_hourly_stats", - entities=[driver], - ttl=timedelta(seconds=10), - online=False, - batch_source=FileSource(path="driver_stats.parquet"), - tags={}, ) - customer_stats = FeatureView( - name="DRIVER_HOURLY_STATS", - entities=[customer], - ttl=timedelta(seconds=10), - online=False, - batch_source=FileSource(path="customer_stats.parquet"), - tags={}, + +@pytest.fixture +def feature_store_with_s3_registry(): + aws_registry_path = os.getenv( + "AWS_REGISTRY_PATH", "s3://feast-integration-tests/registries" ) - try: - feature_store_with_local_registry.apply([driver_stats, customer_stats]) - error = None - except ValueError as e: - error = e - assert ( - isinstance(error, ValueError) - and "Please ensure that all feature view names are case-insensitively unique" - in error.args[0] + return FeatureStore( + config=RepoConfig( + registry=f"{aws_registry_path}/{int(time.time() * 1000)}/registry.db", + project="default", + provider="aws", + online_store=DynamoDBOnlineStoreConfig( + region=os.getenv("AWS_REGION", "us-west-2") + ), + offline_store=FileOfflineStoreConfig(), + ) ) - - feature_store_with_local_registry.teardown() diff --git a/sdk/python/tests/integration/registration/test_inference.py b/sdk/python/tests/integration/registration/test_inference.py index f660c46b15..de02fe53fe 100644 --- a/sdk/python/tests/integration/registration/test_inference.py +++ b/sdk/python/tests/integration/registration/test_inference.py @@ -1,79 +1,17 @@ from copy import deepcopy -import pandas as pd import pytest -from feast import ( - BigQuerySource, - Entity, - Feature, - FeatureService, - FileSource, - RedshiftSource, - RepoConfig, - SnowflakeSource, - ValueType, -) -from feast.data_source import RequestSource -from feast.errors import ( - DataSourceNoNameException, - RegistryInferenceFailure, - SpecifiedFeaturesNotPresentError, -) -from feast.feature_view import FeatureView -from feast.field import Field -from feast.inference import ( - 
update_data_sources_with_inferred_event_timestamp_col, - update_feature_views_with_inferred_features_and_entities, -) -from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( - SparkSource, -) -from feast.on_demand_feature_view import on_demand_feature_view -from feast.types import Float32, Float64, Int64, String, UnixTimestamp -from tests.utils.data_source_utils import ( +from feast import RepoConfig +from feast.errors import RegistryInferenceFailure +from feast.inference import update_data_sources_with_inferred_event_timestamp_col +from tests.utils.data_source_test_creator import ( prep_file_source, simple_bq_source_using_query_arg, simple_bq_source_using_table_arg, ) -def test_infer_datasource_names_file(): - file_path = "path/to/test.csv" - data_source = FileSource(path=file_path) - assert data_source.name == file_path - - source_name = "my_name" - data_source = FileSource(name=source_name, path=file_path) - assert data_source.name == source_name - - -def test_infer_datasource_names_dwh(): - table = "project.table" - dwh_classes = [BigQuerySource, RedshiftSource, SnowflakeSource, SparkSource] - - for dwh_class in dwh_classes: - data_source = dwh_class(table=table) - assert data_source.name == table - - source_name = "my_name" - data_source_with_table = dwh_class(name=source_name, table=table) - assert data_source_with_table.name == source_name - data_source_with_query = dwh_class( - name=source_name, query=f"SELECT * from {table}" - ) - assert data_source_with_query.name == source_name - - # If we have a query and no name, throw an error - if dwh_class == SparkSource: - with pytest.raises(DataSourceNoNameException): - print(f"Testing dwh {dwh_class}") - data_source = dwh_class(query="test_query") - else: - data_source = dwh_class(query="test_query") - assert data_source.name == "" - - @pytest.mark.integration def test_update_file_data_source_with_inferred_event_timestamp_col(simple_dataset_1): df_with_two_viable_timestamp_cols = 
simple_dataset_1.copy(deep=True) @@ -123,330 +61,3 @@ def test_update_data_sources_with_inferred_event_timestamp_col(universal_data_so assert actual_event_timestamp_cols == ["event_timestamp"] * len( data_sources_copy.values() ) - - -def test_on_demand_features_type_inference(): - # Create Feature Views - date_request = RequestSource( - name="date_request", - schema=[Field(name="some_date", dtype=UnixTimestamp)], - ) - - @on_demand_feature_view( - sources=[date_request], - schema=[ - Field(name="output", dtype=UnixTimestamp), - Field(name="string_output", dtype=String), - ], - ) - def test_view(features_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["output"] = features_df["some_date"] - data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) - return data - - test_view.infer_features() - - @on_demand_feature_view( - # Note: we deliberately use `inputs` instead of `sources` to test that `inputs` - # still works correctly, even though it is deprecated. - # TODO(felixwang9817): Remove references to `inputs` once it is fully deprecated. - inputs={"date_request": date_request}, - features=[ - Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), - Feature(name="object_output", dtype=ValueType.STRING), - ], - ) - def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["output"] = features_df["some_date"] - data["object_output"] = features_df["some_date"].astype(str) - return data - - with pytest.raises(ValueError, match="Value with native type object"): - invalid_test_view.infer_features() - - @on_demand_feature_view( - # Note: we deliberately use positional arguments here to test that they work correctly, - # even though positional arguments are deprecated in favor of keyword arguments. - # TODO(felixwang9817): Remove positional arguments once they are fully deprecated. 
- [ - Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), - Feature(name="missing", dtype=ValueType.STRING), - ], - {"date_request": date_request}, - ) - def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["output"] = features_df["some_date"] - return data - - with pytest.raises(SpecifiedFeaturesNotPresentError): - test_view_with_missing_feature.infer_features() - - -# TODO(kevjumba): remove this in feast 0.24 when deprecating -@pytest.mark.parametrize( - "request_source_schema", - [ - [Field(name="some_date", dtype=UnixTimestamp)], - {"some_date": ValueType.UNIX_TIMESTAMP}, - ], -) -def test_datasource_inference(request_source_schema): - # Create Feature Views - date_request = RequestSource( - name="date_request", - schema=request_source_schema, - ) - - @on_demand_feature_view( - # Note: we deliberately use positional arguments here to test that they work correctly, - # even though positional arguments are deprecated in favor of keyword arguments. - # TODO(felixwang9817): Remove positional arguments once they are fully deprecated. 
- [ - Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), - Feature(name="string_output", dtype=ValueType.STRING), - ], - sources=[date_request], - ) - def test_view(features_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["output"] = features_df["some_date"] - data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) - return data - - test_view.infer_features() - - @on_demand_feature_view( - sources=[date_request], - schema=[ - Field(name="output", dtype=UnixTimestamp), - Field(name="object_output", dtype=String), - ], - ) - def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["output"] = features_df["some_date"] - data["object_output"] = features_df["some_date"].astype(str) - return data - - with pytest.raises(ValueError, match="Value with native type object"): - invalid_test_view.infer_features() - - @on_demand_feature_view( - sources=[date_request], - features=[ - Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), - Feature(name="missing", dtype=ValueType.STRING), - ], - ) - def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["output"] = features_df["some_date"] - return data - - with pytest.raises(SpecifiedFeaturesNotPresentError): - test_view_with_missing_feature.infer_features() - - -def test_feature_view_inference_respects_basic_inference(): - """ - Tests that feature view inference respects the basic inference that occurs during creation. 
- """ - file_source = FileSource(name="test", path="test path") - entity1 = Entity(name="test1", join_keys=["test_column_1"]) - entity2 = Entity(name="test2", join_keys=["test_column_2"]) - feature_view_1 = FeatureView( - name="test1", - entities=[entity1], - schema=[ - Field(name="feature", dtype=Float32), - Field(name="test_column_1", dtype=String), - ], - source=file_source, - ) - feature_view_2 = FeatureView( - name="test2", - entities=[entity1, entity2], - schema=[ - Field(name="feature", dtype=Float32), - Field(name="test_column_1", dtype=String), - Field(name="test_column_2", dtype=String), - ], - source=file_source, - ) - - assert len(feature_view_1.schema) == 2 - assert len(feature_view_1.features) == 1 - assert len(feature_view_1.entity_columns) == 1 - - update_feature_views_with_inferred_features_and_entities( - [feature_view_1], [entity1], RepoConfig(provider="local", project="test") - ) - assert len(feature_view_1.schema) == 2 - assert len(feature_view_1.features) == 1 - assert len(feature_view_1.entity_columns) == 1 - - assert len(feature_view_2.schema) == 3 - assert len(feature_view_2.features) == 1 - assert len(feature_view_2.entity_columns) == 2 - - update_feature_views_with_inferred_features_and_entities( - [feature_view_2], - [entity1, entity2], - RepoConfig(provider="local", project="test"), - ) - assert len(feature_view_2.schema) == 3 - assert len(feature_view_2.features) == 1 - assert len(feature_view_2.entity_columns) == 2 - - -def test_feature_view_inference_on_entity_columns(simple_dataset_1): - """ - Tests that feature view inference correctly infers entity columns. 
- """ - with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: - entity1 = Entity(name="test1", join_keys=["id_join_key"]) - feature_view_1 = FeatureView( - name="test1", - entities=[entity1], - schema=[Field(name="int64_col", dtype=Int64)], - source=file_source, - ) - - assert len(feature_view_1.schema) == 1 - assert len(feature_view_1.features) == 1 - assert len(feature_view_1.entity_columns) == 0 - - update_feature_views_with_inferred_features_and_entities( - [feature_view_1], [entity1], RepoConfig(provider="local", project="test") - ) - - # The schema is only used as a parameter, as is therefore not updated during inference. - assert len(feature_view_1.schema) == 1 - - # Since there is already a feature specified, additional features are not inferred. - assert len(feature_view_1.features) == 1 - - # The single entity column is inferred correctly. - assert len(feature_view_1.entity_columns) == 1 - - -def test_feature_view_inference_respects_entity_value_type(simple_dataset_1): - """ - Tests that feature view inference still respects an entity's value type. - """ - # TODO(felixwang9817): Remove this test once entity value_type is removed. - with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: - entity1 = Entity( - name="test1", join_keys=["id_join_key"], value_type=ValueType.STRING - ) - feature_view_1 = FeatureView( - name="test1", - entities=[entity1], - schema=[Field(name="int64_col", dtype=Int64)], - source=file_source, - ) - - assert len(feature_view_1.schema) == 1 - assert len(feature_view_1.features) == 1 - assert len(feature_view_1.entity_columns) == 0 - - update_feature_views_with_inferred_features_and_entities( - [feature_view_1], [entity1], RepoConfig(provider="local", project="test") - ) - - # The schema is only used as a parameter, as is therefore not updated during inference. 
- assert len(feature_view_1.schema) == 1 - - # Since there is already a feature specified, additional features are not inferred. - assert len(feature_view_1.features) == 1 - - # The single entity column is inferred correctly and has type String. - assert len(feature_view_1.entity_columns) == 1 - assert feature_view_1.entity_columns[0].dtype == String - - -def test_feature_view_inference_on_feature_columns(simple_dataset_1): - """ - Tests that feature view inference correctly infers feature columns. - """ - with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: - entity1 = Entity(name="test1", join_keys=["id_join_key"]) - feature_view_1 = FeatureView( - name="test1", - entities=[entity1], - schema=[Field(name="id_join_key", dtype=Int64)], - source=file_source, - ) - - assert len(feature_view_1.schema) == 1 - assert len(feature_view_1.features) == 0 - assert len(feature_view_1.entity_columns) == 1 - - update_feature_views_with_inferred_features_and_entities( - [feature_view_1], [entity1], RepoConfig(provider="local", project="test") - ) - - # The schema is only used as a parameter, as is therefore not updated during inference. - assert len(feature_view_1.schema) == 1 - - # All three feature columns are inferred correctly. - assert len(feature_view_1.features) == 3 - print(feature_view_1.features) - feature_column_1 = Field(name="float_col", dtype=Float64) - feature_column_2 = Field(name="int64_col", dtype=Int64) - feature_column_3 = Field(name="string_col", dtype=String) - assert feature_column_1 in feature_view_1.features - assert feature_column_2 in feature_view_1.features - assert feature_column_3 in feature_view_1.features - - # The single entity column remains. 
- assert len(feature_view_1.entity_columns) == 1 - - -def test_update_feature_services_with_inferred_features(simple_dataset_1): - with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: - entity1 = Entity(name="test1", join_keys=["id_join_key"]) - feature_view_1 = FeatureView( - name="test1", - entities=[entity1], - source=file_source, - ) - feature_view_2 = FeatureView( - name="test2", - entities=[entity1], - source=file_source, - ) - - feature_service = FeatureService( - name="fs_1", features=[feature_view_1[["string_col"]], feature_view_2] - ) - assert len(feature_service.feature_view_projections) == 2 - assert len(feature_service.feature_view_projections[0].features) == 0 - assert len(feature_service.feature_view_projections[0].desired_features) == 1 - assert len(feature_service.feature_view_projections[1].features) == 0 - assert len(feature_service.feature_view_projections[1].desired_features) == 0 - - update_feature_views_with_inferred_features_and_entities( - [feature_view_1, feature_view_2], - [entity1], - RepoConfig(provider="local", project="test"), - ) - feature_service.infer_features( - fvs_to_update={ - feature_view_1.name: feature_view_1, - feature_view_2.name: feature_view_2, - } - ) - - assert len(feature_view_1.schema) == 0 - assert len(feature_view_1.features) == 3 - assert len(feature_view_2.schema) == 0 - assert len(feature_view_2.features) == 3 - assert len(feature_service.feature_view_projections[0].features) == 1 - assert len(feature_service.feature_view_projections[1].features) == 3 - - -# TODO(felixwang9817): Add tests that interact with field mapping. 
diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index e192657074..0cc161d997 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -14,33 +14,19 @@ import os import time from datetime import timedelta -from tempfile import mkstemp -import pandas as pd import pytest from pytest_lazyfixture import lazy_fixture from feast import FileSource -from feast.aggregation import Aggregation -from feast.data_format import AvroFormat, ParquetFormat -from feast.data_source import KafkaSource +from feast.data_format import ParquetFormat from feast.entity import Entity -from feast.feature import Feature from feast.feature_view import FeatureView from feast.field import Field -from feast.on_demand_feature_view import RequestSource, on_demand_feature_view from feast.registry import Registry from feast.repo_config import RegistryConfig -from feast.stream_feature_view import StreamFeatureView -from feast.types import Array, Bytes, Float32, Int32, Int64, String -from feast.value_type import ValueType - - -@pytest.fixture -def local_registry() -> Registry: - fd, registry_path = mkstemp() - registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600) - return Registry(registry_config, None) +from feast.types import Array, Bytes, Int64, String +from tests.utils.e2e_test_validation import validate_registry_data_source_apply @pytest.fixture @@ -74,52 +60,6 @@ def s3_registry() -> Registry: return Registry(registry_config, None) -@pytest.mark.parametrize( - "test_registry", - [lazy_fixture("local_registry")], -) -def test_apply_entity_success(test_registry): - entity = Entity( - name="driver_car_id", - description="Car driver id", - tags={"team": "matchmaking"}, - ) - - project = "project" - - # Register Entity - test_registry.apply_entity(entity, project) - - entities = test_registry.list_entities(project) - 
- entity = entities[0] - assert ( - len(entities) == 1 - and entity.name == "driver_car_id" - and entity.description == "Car driver id" - and "team" in entity.tags - and entity.tags["team"] == "matchmaking" - ) - - entity = test_registry.get_entity("driver_car_id", project) - assert ( - entity.name == "driver_car_id" - and entity.description == "Car driver id" - and "team" in entity.tags - and entity.tags["team"] == "matchmaking" - ) - - test_registry.delete_entity("driver_car_id", project) - entities = test_registry.list_entities(project) - assert len(entities) == 0 - - test_registry.teardown() - - # Will try to reload registry, which will fail because the file has been deleted - with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto(project=project) - - @pytest.mark.integration @pytest.mark.parametrize( "test_registry", @@ -163,355 +103,6 @@ def test_apply_entity_integration(test_registry): test_registry._get_registry_proto(project=project) -@pytest.mark.parametrize( - "test_registry", - [lazy_fixture("local_registry")], -) -def test_apply_feature_view_success(test_registry): - # Create Feature Views - batch_source = FileSource( - file_format=ParquetFormat(), - path="file://feast/*", - timestamp_field="ts_col", - created_timestamp_column="timestamp", - ) - - entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) - - fv1 = FeatureView( - name="my_feature_view_1", - schema=[ - Field(name="fs1_my_feature_1", dtype=Int64), - Field(name="fs1_my_feature_2", dtype=String), - Field(name="fs1_my_feature_3", dtype=Array(String)), - Field(name="fs1_my_feature_4", dtype=Array(Bytes)), - ], - entities=[entity], - tags={"team": "matchmaking"}, - batch_source=batch_source, - ttl=timedelta(minutes=5), - ) - - project = "project" - - # Register Feature View - test_registry.apply_feature_view(fv1, project) - - feature_views = test_registry.list_feature_views(project) - - # List Feature Views - assert ( - len(feature_views) == 1 - and feature_views[0].name 
== "my_feature_view_1" - and feature_views[0].features[0].name == "fs1_my_feature_1" - and feature_views[0].features[0].dtype == Int64 - and feature_views[0].features[1].name == "fs1_my_feature_2" - and feature_views[0].features[1].dtype == String - and feature_views[0].features[2].name == "fs1_my_feature_3" - and feature_views[0].features[2].dtype == Array(String) - and feature_views[0].features[3].name == "fs1_my_feature_4" - and feature_views[0].features[3].dtype == Array(Bytes) - and feature_views[0].entities[0] == "fs1_my_entity_1" - ) - - feature_view = test_registry.get_feature_view("my_feature_view_1", project) - assert ( - feature_view.name == "my_feature_view_1" - and feature_view.features[0].name == "fs1_my_feature_1" - and feature_view.features[0].dtype == Int64 - and feature_view.features[1].name == "fs1_my_feature_2" - and feature_view.features[1].dtype == String - and feature_view.features[2].name == "fs1_my_feature_3" - and feature_view.features[2].dtype == Array(String) - and feature_view.features[3].name == "fs1_my_feature_4" - and feature_view.features[3].dtype == Array(Bytes) - and feature_view.entities[0] == "fs1_my_entity_1" - ) - - test_registry.delete_feature_view("my_feature_view_1", project) - feature_views = test_registry.list_feature_views(project) - assert len(feature_views) == 0 - - test_registry.teardown() - - # Will try to reload registry, which will fail because the file has been deleted - with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto(project=project) - - -@pytest.mark.parametrize( - "test_registry", - [lazy_fixture("local_registry")], -) -def test_apply_on_demand_feature_view_success(test_registry): - # Create Feature Views - driver_stats = FileSource( - name="driver_stats_source", - path="data/driver_stats_lat_lon.parquet", - timestamp_field="event_timestamp", - created_timestamp_column="created", - description="A table describing the stats of a driver based on hourly logs", - owner="test2@gmail.com", 
- ) - - driver_daily_features_view = FeatureView( - name="driver_daily_features", - entities=["driver"], - ttl=timedelta(seconds=8640000000), - schema=[ - Field(name="daily_miles_driven", dtype=Float32), - Field(name="lat", dtype=Float32), - Field(name="lon", dtype=Float32), - Field(name="string_feature", dtype=String), - ], - online=True, - source=driver_stats, - tags={"production": "True"}, - owner="test2@gmail.com", - ) - - @on_demand_feature_view( - sources=[driver_daily_features_view], - schema=[Field(name="first_char", dtype=String)], - ) - def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: - df = pd.DataFrame() - df["first_char"] = inputs["string_feature"].str[:1].astype("string") - return df - - project = "project" - - # Register Feature View - test_registry.apply_feature_view(location_features_from_push, project) - - feature_views = test_registry.list_on_demand_feature_views(project) - - # List Feature Views - assert ( - len(feature_views) == 1 - and feature_views[0].name == "location_features_from_push" - and feature_views[0].features[0].name == "first_char" - and feature_views[0].features[0].dtype == String - ) - - feature_view = test_registry.get_on_demand_feature_view( - "location_features_from_push", project - ) - assert ( - feature_view.name == "location_features_from_push" - and feature_view.features[0].name == "first_char" - and feature_view.features[0].dtype == String - ) - - test_registry.delete_feature_view("location_features_from_push", project) - feature_views = test_registry.list_on_demand_feature_views(project) - assert len(feature_views) == 0 - - test_registry.teardown() - - # Will try to reload registry, which will fail because the file has been deleted - with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto(project=project) - - -@pytest.mark.parametrize( - "test_registry", - [lazy_fixture("local_registry")], -) -def test_apply_stream_feature_view_success(test_registry): - # Create Feature Views - 
def simple_udf(x: int): - return x + 3 - - entity = Entity(name="driver_entity", join_keys=["test_key"]) - - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=FileSource(path="some path"), - watermark_delay_threshold=timedelta(days=1), - ) - - sfv = StreamFeatureView( - name="test kafka stream feature view", - entities=[entity], - ttl=timedelta(days=30), - owner="test@example.com", - online=True, - schema=[Field(name="dummy_field", dtype=Float32)], - description="desc", - aggregations=[ - Aggregation( - column="dummy_field", - function="max", - time_window=timedelta(days=1), - ), - Aggregation( - column="dummy_field2", - function="count", - time_window=timedelta(days=24), - ), - ], - timestamp_field="event_timestamp", - mode="spark", - source=stream_source, - udf=simple_udf, - tags={}, - ) - - project = "project" - - # Register Feature View - test_registry.apply_feature_view(sfv, project) - - stream_feature_views = test_registry.list_stream_feature_views(project) - - # List Feature Views - assert len(stream_feature_views) == 1 - assert stream_feature_views[0] == sfv - - test_registry.delete_feature_view("test kafka stream feature view", project) - stream_feature_views = test_registry.list_stream_feature_views(project) - assert len(stream_feature_views) == 0 - - test_registry.teardown() - - # Will try to reload registry, which will fail because the file has been deleted - with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto(project=project) - - -@pytest.mark.parametrize( - "test_registry", - [lazy_fixture("local_registry")], -) -# TODO(kevjumba): remove this in feast 0.24 when deprecating -@pytest.mark.parametrize( - "request_source_schema", - [[Field(name="my_input_1", dtype=Int32)], {"my_input_1": ValueType.INT32}], -) -def test_modify_feature_views_success(test_registry, request_source_schema): - # Create Feature 
Views - batch_source = FileSource( - file_format=ParquetFormat(), - path="file://feast/*", - timestamp_field="ts_col", - created_timestamp_column="timestamp", - ) - - request_source = RequestSource( - name="request_source", - schema=request_source_schema, - ) - - entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) - - fv1 = FeatureView( - name="my_feature_view_1", - schema=[Field(name="fs1_my_feature_1", dtype=Int64)], - entities=[entity], - tags={"team": "matchmaking"}, - batch_source=batch_source, - ttl=timedelta(minutes=5), - ) - - @on_demand_feature_view( - features=[ - Feature(name="odfv1_my_feature_1", dtype=ValueType.STRING), - Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), - ], - sources=[request_source], - ) - def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["odfv1_my_feature_1"] = feature_df["my_input_1"].astype("category") - data["odfv1_my_feature_2"] = feature_df["my_input_1"].astype("int32") - return data - - project = "project" - - # Register Feature Views - test_registry.apply_feature_view(odfv1, project) - test_registry.apply_feature_view(fv1, project) - - # Modify odfv by changing a single feature dtype - @on_demand_feature_view( - features=[ - Feature(name="odfv1_my_feature_1", dtype=ValueType.FLOAT), - Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), - ], - sources=[request_source], - ) - def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: - data = pd.DataFrame() - data["odfv1_my_feature_1"] = feature_df["my_input_1"].astype("float") - data["odfv1_my_feature_2"] = feature_df["my_input_1"].astype("int32") - return data - - # Apply the modified odfv - test_registry.apply_feature_view(odfv1, project) - - # Check odfv - on_demand_feature_views = test_registry.list_on_demand_feature_views(project) - - assert ( - len(on_demand_feature_views) == 1 - and on_demand_feature_views[0].name == "odfv1" - and on_demand_feature_views[0].features[0].name == "odfv1_my_feature_1" - and 
on_demand_feature_views[0].features[0].dtype == Float32 - and on_demand_feature_views[0].features[1].name == "odfv1_my_feature_2" - and on_demand_feature_views[0].features[1].dtype == Int32 - ) - request_schema = on_demand_feature_views[0].get_request_data_schema() - assert ( - list(request_schema.keys())[0] == "my_input_1" - and list(request_schema.values())[0] == ValueType.INT32 - ) - - feature_view = test_registry.get_on_demand_feature_view("odfv1", project) - assert ( - feature_view.name == "odfv1" - and feature_view.features[0].name == "odfv1_my_feature_1" - and feature_view.features[0].dtype == Float32 - and feature_view.features[1].name == "odfv1_my_feature_2" - and feature_view.features[1].dtype == Int32 - ) - request_schema = feature_view.get_request_data_schema() - assert ( - list(request_schema.keys())[0] == "my_input_1" - and list(request_schema.values())[0] == ValueType.INT32 - ) - - # Make sure fv1 is untouched - feature_views = test_registry.list_feature_views(project) - - # List Feature Views - assert ( - len(feature_views) == 1 - and feature_views[0].name == "my_feature_view_1" - and feature_views[0].features[0].name == "fs1_my_feature_1" - and feature_views[0].features[0].dtype == Int64 - and feature_views[0].entities[0] == "fs1_my_entity_1" - ) - - feature_view = test_registry.get_feature_view("my_feature_view_1", project) - assert ( - feature_view.name == "my_feature_view_1" - and feature_view.features[0].name == "fs1_my_feature_1" - and feature_view.features[0].dtype == Int64 - and feature_view.entities[0] == "fs1_my_entity_1" - ) - - test_registry.teardown() - - # Will try to reload registry, which will fail because the file has been deleted - with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto(project=project) - - @pytest.mark.integration @pytest.mark.parametrize( "test_registry", @@ -595,163 +186,4 @@ def test_apply_feature_view_integration(test_registry): [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], ) 
def test_apply_data_source_integration(test_registry: Registry): - run_test_data_source_apply(test_registry) - - -@pytest.mark.parametrize( - "test_registry", - [lazy_fixture("local_registry")], -) -def test_apply_data_source(test_registry: Registry): - run_test_data_source_apply(test_registry) - - -def run_test_data_source_apply(test_registry: Registry): - # Create Feature Views - batch_source = FileSource( - name="test_source", - file_format=ParquetFormat(), - path="file://feast/*", - timestamp_field="ts_col", - created_timestamp_column="timestamp", - ) - - entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) - - fv1 = FeatureView( - name="my_feature_view_1", - schema=[ - Field(name="fs1_my_feature_1", dtype=Int64), - Field(name="fs1_my_feature_2", dtype=String), - Field(name="fs1_my_feature_3", dtype=Array(String)), - Field(name="fs1_my_feature_4", dtype=Array(Bytes)), - ], - entities=[entity], - tags={"team": "matchmaking"}, - batch_source=batch_source, - ttl=timedelta(minutes=5), - ) - - project = "project" - - # Register data source and feature view - test_registry.apply_data_source(batch_source, project, commit=False) - test_registry.apply_feature_view(fv1, project, commit=True) - - registry_feature_views = test_registry.list_feature_views(project) - registry_data_sources = test_registry.list_data_sources(project) - assert len(registry_feature_views) == 1 - assert len(registry_data_sources) == 1 - registry_feature_view = registry_feature_views[0] - assert registry_feature_view.batch_source == batch_source - registry_data_source = registry_data_sources[0] - assert registry_data_source == batch_source - - # Check that change to batch source propagates - batch_source.timestamp_field = "new_ts_col" - test_registry.apply_data_source(batch_source, project, commit=False) - test_registry.apply_feature_view(fv1, project, commit=True) - registry_feature_views = test_registry.list_feature_views(project) - registry_data_sources = 
test_registry.list_data_sources(project) - assert len(registry_feature_views) == 1 - assert len(registry_data_sources) == 1 - registry_feature_view = registry_feature_views[0] - assert registry_feature_view.batch_source == batch_source - registry_batch_source = test_registry.list_data_sources(project)[0] - assert registry_batch_source == batch_source - - test_registry.teardown() - - # Will try to reload registry, which will fail because the file has been deleted - with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto(project=project) - - -def test_commit(): - fd, registry_path = mkstemp() - registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600) - test_registry = Registry(registry_config, None) - - entity = Entity( - name="driver_car_id", - description="Car driver id", - tags={"team": "matchmaking"}, - ) - - project = "project" - - # Register Entity without commiting - test_registry.apply_entity(entity, project, commit=False) - assert test_registry.cached_registry_proto - assert len(test_registry.cached_registry_proto.project_metadata) == 1 - project_metadata = test_registry.cached_registry_proto.project_metadata[0] - project_uuid = project_metadata.project_uuid - assert len(project_uuid) == 36 - assert_project_uuid(project_uuid, test_registry) - - # Retrieving the entity should still succeed - entities = test_registry.list_entities(project, allow_cache=True) - entity = entities[0] - assert ( - len(entities) == 1 - and entity.name == "driver_car_id" - and entity.description == "Car driver id" - and "team" in entity.tags - and entity.tags["team"] == "matchmaking" - ) - assert_project_uuid(project_uuid, test_registry) - - entity = test_registry.get_entity("driver_car_id", project, allow_cache=True) - assert ( - entity.name == "driver_car_id" - and entity.description == "Car driver id" - and "team" in entity.tags - and entity.tags["team"] == "matchmaking" - ) - assert_project_uuid(project_uuid, test_registry) - - # Create new 
registry that points to the same store - registry_with_same_store = Registry(registry_config, None) - - # Retrieving the entity should fail since the store is empty - entities = registry_with_same_store.list_entities(project) - assert len(entities) == 0 - assert_project_uuid(project_uuid, registry_with_same_store) - - # commit from the original registry - test_registry.commit() - - # Reconstruct the new registry in order to read the newly written store - registry_with_same_store = Registry(registry_config, None) - - # Retrieving the entity should now succeed - entities = registry_with_same_store.list_entities(project) - entity = entities[0] - assert ( - len(entities) == 1 - and entity.name == "driver_car_id" - and entity.description == "Car driver id" - and "team" in entity.tags - and entity.tags["team"] == "matchmaking" - ) - assert_project_uuid(project_uuid, registry_with_same_store) - - entity = test_registry.get_entity("driver_car_id", project) - assert ( - entity.name == "driver_car_id" - and entity.description == "Car driver id" - and "team" in entity.tags - and entity.tags["team"] == "matchmaking" - ) - - test_registry.teardown() - - # Will try to reload registry, which will fail because the file has been deleted - with pytest.raises(FileNotFoundError): - test_registry._get_registry_proto(project=project) - - -def assert_project_uuid(project_uuid, test_registry): - assert len(test_registry.cached_registry_proto.project_metadata) == 1 - project_metadata = test_registry.cached_registry_proto.project_metadata[0] - assert project_metadata.project_uuid == project_uuid + validate_registry_data_source_apply(test_registry) diff --git a/sdk/python/tests/integration/registration/test_stream_feature_view_apply.py b/sdk/python/tests/integration/registration/test_stream_feature_view_apply.py deleted file mode 100644 index 22a8d8a699..0000000000 --- a/sdk/python/tests/integration/registration/test_stream_feature_view_apply.py +++ /dev/null @@ -1,153 +0,0 @@ -from datetime 
import timedelta - -import pytest - -from feast.aggregation import Aggregation -from feast.data_format import AvroFormat -from feast.data_source import KafkaSource -from feast.entity import Entity -from feast.field import Field -from feast.stream_feature_view import stream_feature_view -from feast.types import Float32 -from tests.utils.cli_utils import CliRunner, get_example_repo -from tests.utils.data_source_utils import prep_file_source - - -@pytest.mark.integration -def test_apply_stream_feature_view(simple_dataset_1) -> None: - """ - Test apply of StreamFeatureView. - """ - runner = CliRunner() - with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" - ) as fs, prep_file_source( - df=simple_dataset_1, timestamp_field="ts_1" - ) as file_source: - entity = Entity(name="driver_entity", join_keys=["test_key"]) - - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=file_source, - watermark_delay_threshold=timedelta(days=1), - ) - - @stream_feature_view( - entities=[entity], - ttl=timedelta(days=30), - owner="test@example.com", - online=True, - schema=[Field(name="dummy_field", dtype=Float32)], - description="desc", - aggregations=[ - Aggregation( - column="dummy_field", - function="max", - time_window=timedelta(days=1), - ), - Aggregation( - column="dummy_field2", - function="count", - time_window=timedelta(days=24), - ), - ], - timestamp_field="event_timestamp", - mode="spark", - source=stream_source, - tags={}, - ) - def simple_sfv(df): - return df - - fs.apply([entity, simple_sfv]) - - stream_feature_views = fs.list_stream_feature_views() - assert len(stream_feature_views) == 1 - assert stream_feature_views[0] == simple_sfv - - features = fs.get_online_features( - features=["simple_sfv:dummy_field"], - entity_rows=[{"test_key": 1001}], - ).to_dict(include_event_timestamps=True) - - assert "test_key" in features 
- assert features["test_key"] == [1001] - assert "dummy_field" in features - assert features["dummy_field"] == [None] - - -@pytest.mark.integration -def test_stream_feature_view_udf(simple_dataset_1) -> None: - """ - Test apply of StreamFeatureView udfs are serialized correctly and usable. - """ - runner = CliRunner() - with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" - ) as fs, prep_file_source( - df=simple_dataset_1, timestamp_field="ts_1" - ) as file_source: - entity = Entity(name="driver_entity", join_keys=["test_key"]) - - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=file_source, - watermark_delay_threshold=timedelta(days=1), - ) - - @stream_feature_view( - entities=[entity], - ttl=timedelta(days=30), - owner="test@example.com", - online=True, - schema=[Field(name="dummy_field", dtype=Float32)], - description="desc", - aggregations=[ - Aggregation( - column="dummy_field", - function="max", - time_window=timedelta(days=1), - ), - Aggregation( - column="dummy_field2", - function="count", - time_window=timedelta(days=24), - ), - ], - timestamp_field="event_timestamp", - mode="spark", - source=stream_source, - tags={}, - ) - def pandas_view(pandas_df): - import pandas as pd - - assert type(pandas_df) == pd.DataFrame - df = pandas_df.transform(lambda x: x + 10, axis=1) - df.insert(2, "C", [20.2, 230.0, 34.0], True) - return df - - import pandas as pd - - fs.apply([entity, pandas_view]) - - stream_feature_views = fs.list_stream_feature_views() - assert len(stream_feature_views) == 1 - assert stream_feature_views[0] == pandas_view - - sfv = stream_feature_views[0] - - df = pd.DataFrame({"A": [1, 2, 3], "B": [10, 20, 30]}) - new_df = sfv.udf(df) - expected_df = pd.DataFrame( - {"A": [11, 12, 13], "B": [20, 30, 40], "C": [20.2, 230.0, 34.0]} - ) - assert new_df.equals(expected_df) diff --git 
a/sdk/python/tests/integration/registration/test_universal_cli.py b/sdk/python/tests/integration/registration/test_universal_cli.py new file mode 100644 index 0000000000..3e77f74edd --- /dev/null +++ b/sdk/python/tests/integration/registration/test_universal_cli.py @@ -0,0 +1,166 @@ +import tempfile +import uuid +from pathlib import Path +from textwrap import dedent + +import pytest +from assertpy import assertpy + +from feast.feature_store import FeatureStore +from tests.integration.feature_repos.repo_configuration import Environment +from tests.utils.basic_read_write_test import basic_rw_test +from tests.utils.cli_repo_creator import CliRunner, get_example_repo +from tests.utils.e2e_test_validation import ( + NULLABLE_ONLINE_STORE_CONFIGS, + make_feature_store_yaml, +) + + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +def test_universal_cli(environment: Environment): + project = f"test_universal_cli_{str(uuid.uuid4()).replace('-', '')[:8]}" + runner = CliRunner() + + with tempfile.TemporaryDirectory() as repo_dir_name: + try: + repo_path = Path(repo_dir_name) + feature_store_yaml = make_feature_store_yaml( + project, environment.test_repo_config, repo_path + ) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text(dedent(feature_store_yaml)) + + repo_example = repo_path / "example.py" + repo_example.write_text(get_example_repo("example_feature_repo_1.py")) + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # Store registry contents, to be compared later. + fs = FeatureStore(repo_path=str(repo_path)) + registry_dict = fs.registry.to_dict(project=project) + # Save only the specs, not the metadata. 
+ registry_specs = { + key: [fco["spec"] if "spec" in fco else fco for fco in value] + for key, value in registry_dict.items() + } + + # entity & feature view list commands should succeed + result = runner.run(["entities", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-views", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-services", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["data-sources", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # entity & feature view describe commands should succeed when objects exist + result = runner.run(["entities", "describe", "driver"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-views", "describe", "driver_locations"], cwd=repo_path + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-services", "describe", "driver_locations_service"], + cwd=repo_path, + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_feature_views()).is_length(4) + result = runner.run( + ["data-sources", "describe", "customer_profile_source"], + cwd=repo_path, + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_data_sources()).is_length(4) + + # entity & feature view describe commands should fail when objects don't exist + result = runner.run(["entities", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-services", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["data-sources", 
"describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + + # Doing another apply should be a no op, and should not cause errors + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + basic_rw_test( + FeatureStore(repo_path=str(repo_path), config=None), + view_name="driver_locations", + ) + + # Confirm that registry contents have not changed. + registry_dict = fs.registry.to_dict(project=project) + assertpy.assert_that(registry_specs).is_equal_to( + { + key: [fco["spec"] if "spec" in fco else fco for fco in value] + for key, value in registry_dict.items() + } + ) + + result = runner.run(["teardown"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + finally: + runner.run(["teardown"], cwd=repo_path) + + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +def test_odfv_apply(environment) -> None: + project = f"test_odfv_apply{str(uuid.uuid4()).replace('-', '')[:8]}" + runner = CliRunner() + + with tempfile.TemporaryDirectory() as repo_dir_name: + try: + repo_path = Path(repo_dir_name) + feature_store_yaml = make_feature_store_yaml( + project, environment.test_repo_config, repo_path + ) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text(dedent(feature_store_yaml)) + + repo_example = repo_path / "example.py" + repo_example.write_text(get_example_repo("on_demand_feature_view_repo.py")) + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # entity & feature view list commands should succeed + result = runner.run(["entities", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["on-demand-feature-views", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + finally: + runner.run(["teardown"], cwd=repo_path) + + +@pytest.mark.integration 
+@pytest.mark.parametrize("test_nullable_online_store", NULLABLE_ONLINE_STORE_CONFIGS) +def test_nullable_online_store(test_nullable_online_store) -> None: + project = f"test_nullable_online_store{str(uuid.uuid4()).replace('-', '')[:8]}" + runner = CliRunner() + + with tempfile.TemporaryDirectory() as repo_dir_name: + try: + repo_path = Path(repo_dir_name) + feature_store_yaml = make_feature_store_yaml( + project, test_nullable_online_store, repo_path + ) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text(dedent(feature_store_yaml)) + + repo_example = repo_path / "example.py" + repo_example.write_text(get_example_repo("example_feature_repo_1.py")) + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + finally: + runner.run(["teardown"], cwd=repo_path) diff --git a/sdk/python/tests/integration/registration/test_universal_types.py b/sdk/python/tests/integration/registration/test_universal_types.py index ad29531e11..1d90eee13e 100644 --- a/sdk/python/tests/integration/registration/test_universal_types.py +++ b/sdk/python/tests/integration/registration/test_universal_types.py @@ -26,97 +26,6 @@ logger = logging.getLogger(__name__) -def populate_test_configs(offline: bool): - feature_dtypes = [ - "int32", - "int64", - "float", - "bool", - "datetime", - ] - configs: List[TypeTestConfig] = [] - for feature_dtype in feature_dtypes: - for feature_is_list in [True, False]: - for has_empty_list in [True, False]: - # For non list features `has_empty_list` does nothing - if feature_is_list is False and has_empty_list is True: - continue - - configs.append( - TypeTestConfig( - feature_dtype=feature_dtype, - feature_is_list=feature_is_list, - has_empty_list=has_empty_list, - ) - ) - return configs - - -@dataclass(frozen=True, repr=True) -class TypeTestConfig: - feature_dtype: str - feature_is_list: bool - has_empty_list: bool - - -OFFLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = 
populate_test_configs(offline=True) -ONLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=False) - - -@pytest.fixture( - params=OFFLINE_TYPE_TEST_CONFIGS, - ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], -) -def offline_types_test_fixtures(request, environment): - config: TypeTestConfig = request.param - if ( - environment.test_repo_config.provider == "aws" - and config.feature_is_list is True - ): - pytest.skip("Redshift doesn't support list features") - - return get_fixtures(request, environment) - - -@pytest.fixture( - params=ONLINE_TYPE_TEST_CONFIGS, - ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], -) -def online_types_test_fixtures(request, environment): - return get_fixtures(request, environment) - - -def get_fixtures(request, environment): - config: TypeTestConfig = request.param - # Lower case needed because Redshift lower-cases all table names - destination_name = ( - f"feature_type_{config.feature_dtype}{config.feature_is_list}".replace( - ".", "" - ).lower() - ) - config = request.param - df = create_basic_driver_dataset( - Int64, - config.feature_dtype, - config.feature_is_list, - config.has_empty_list, - ) - data_source = environment.data_source_creator.create_data_source( - df, - destination_name=destination_name, - field_mapping={"ts_1": "ts"}, - ) - fv = create_feature_view( - destination_name, - config.feature_dtype, - config.feature_is_list, - config.has_empty_list, - data_source, - ) - - return config, data_source, fv - - @pytest.mark.integration @pytest.mark.universal_offline_stores @pytest.mark.parametrize("entity_type", [Int32, Int64, String]) @@ -397,3 +306,94 @@ def assert_expected_arrow_types( assert arrow_type_checker(pa_type.value_type) else: assert arrow_type_checker(pa_type) + + +def populate_test_configs(offline: bool): + feature_dtypes = [ + "int32", + "int64", + "float", + "bool", + "datetime", + ] + configs: List[TypeTestConfig] = [] + for feature_dtype in feature_dtypes: + for feature_is_list in [True, 
False]: + for has_empty_list in [True, False]: + # For non list features `has_empty_list` does nothing + if feature_is_list is False and has_empty_list is True: + continue + + configs.append( + TypeTestConfig( + feature_dtype=feature_dtype, + feature_is_list=feature_is_list, + has_empty_list=has_empty_list, + ) + ) + return configs + + +@dataclass(frozen=True, repr=True) +class TypeTestConfig: + feature_dtype: str + feature_is_list: bool + has_empty_list: bool + + +OFFLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=True) +ONLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=False) + + +@pytest.fixture( + params=OFFLINE_TYPE_TEST_CONFIGS, + ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], +) +def offline_types_test_fixtures(request, environment): + config: TypeTestConfig = request.param + if ( + environment.test_repo_config.provider == "aws" + and config.feature_is_list is True + ): + pytest.skip("Redshift doesn't support list features") + + return get_fixtures(request, environment) + + +@pytest.fixture( + params=ONLINE_TYPE_TEST_CONFIGS, + ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], +) +def online_types_test_fixtures(request, environment): + return get_fixtures(request, environment) + + +def get_fixtures(request, environment): + config: TypeTestConfig = request.param + # Lower case needed because Redshift lower-cases all table names + destination_name = ( + f"feature_type_{config.feature_dtype}{config.feature_is_list}".replace( + ".", "" + ).lower() + ) + config = request.param + df = create_basic_driver_dataset( + Int64, + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, + ) + data_source = environment.data_source_creator.create_data_source( + df, + destination_name=destination_name, + field_mapping={"ts_1": "ts"}, + ) + fv = create_feature_view( + destination_name, + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, + data_source, + ) + + return config, 
data_source, fv diff --git a/sdk/python/tests/integration/scaffolding/test_partial_apply.py b/sdk/python/tests/integration/scaffolding/test_partial_apply.py deleted file mode 100644 index e5a7206b96..0000000000 --- a/sdk/python/tests/integration/scaffolding/test_partial_apply.py +++ /dev/null @@ -1,48 +0,0 @@ -from datetime import timedelta - -import pytest - -from feast import BigQuerySource, Entity, FeatureView, Field -from feast.types import Float32, String -from tests.utils.cli_utils import CliRunner, get_example_repo -from tests.utils.online_read_write_test import basic_rw_test - - -@pytest.mark.integration -def test_partial() -> None: - """ - Add another table to existing repo using partial apply API. Make sure both the table - applied via CLI apply and the new table are passing RW test. - """ - - runner = CliRunner() - with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" - ) as store: - driver = Entity(name="driver", join_keys=["test"]) - - driver_locations_source = BigQuerySource( - table="feast-oss.public.drivers", - timestamp_field="event_timestamp", - created_timestamp_column="created_timestamp", - ) - - driver_locations_100 = FeatureView( - name="driver_locations_100", - entities=[driver], - ttl=timedelta(days=1), - schema=[ - Field(name="lat", dtype=Float32), - Field(name="lon", dtype=String), - Field(name="name", dtype=String), - Field(name="test", dtype=String), - ], - online=True, - batch_source=driver_locations_source, - tags={}, - ) - - store.apply([driver_locations_100]) - - basic_rw_test(store, view_name="driver_locations") - basic_rw_test(store, view_name="driver_locations_100") diff --git a/sdk/python/tests/unit/cli/test_cli.py b/sdk/python/tests/unit/cli/test_cli.py new file mode 100644 index 0000000000..9b535ce8fb --- /dev/null +++ b/sdk/python/tests/unit/cli/test_cli.py @@ -0,0 +1,140 @@ +import tempfile +from contextlib import contextmanager +from pathlib import Path +from textwrap import dedent + +from 
assertpy import assertpy + +from tests.utils.cli_repo_creator import CliRunner + + +def test_3rd_party_providers() -> None: + """ + Test running apply on third party providers + """ + runner = CliRunner() + # Check with incorrect built-in provider name (no dots) + with setup_third_party_provider_repo("feast123") as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(1) + assertpy.assert_that(output).contains(b"Provider 'feast123' is not implemented") + # Check with incorrect third-party provider name (with dots) + with setup_third_party_provider_repo("feast_foo.Provider") as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(1) + assertpy.assert_that(output).contains( + b"Could not import module 'feast_foo' while attempting to load class 'Provider'" + ) + # Check with incorrect third-party provider name (with dots) + with setup_third_party_provider_repo("foo.FooProvider") as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(1) + assertpy.assert_that(output).contains( + b"Could not import class 'FooProvider' from module 'foo'" + ) + # Check with correct third-party provider name + with setup_third_party_provider_repo("foo.provider.FooProvider") as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(0) + + +def test_3rd_party_registry_store() -> None: + """ + Test running apply on third party registry stores + """ + runner = CliRunner() + # Check with incorrect built-in provider name (no dots) + with setup_third_party_registry_store_repo("feast123") as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(1) + assertpy.assert_that(output).contains( + b'Registry store class 
name should end with "RegistryStore"' + ) + # Check with incorrect third-party registry store name (with dots) + with setup_third_party_registry_store_repo("feast_foo.RegistryStore") as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(1) + assertpy.assert_that(output).contains( + b"Could not import module 'feast_foo' while attempting to load class 'RegistryStore'" + ) + # Check with incorrect third-party registry store name (with dots) + with setup_third_party_registry_store_repo("foo.FooRegistryStore") as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(1) + assertpy.assert_that(output).contains( + b"Could not import class 'FooRegistryStore' from module 'foo'" + ) + # Check with correct third-party registry store name + with setup_third_party_registry_store_repo( + "foo.registry_store.FooRegistryStore" + ) as repo_path: + return_code, output = runner.run_with_output(["apply"], cwd=repo_path) + assertpy.assert_that(return_code).is_equal_to(0) + + +@contextmanager +def setup_third_party_provider_repo(provider_name: str): + with tempfile.TemporaryDirectory() as repo_dir_name: + + # Construct an example repo in a temporary dir + repo_path = Path(repo_dir_name) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text( + dedent( + f""" + project: foo + registry: data/registry.db + provider: {provider_name} + online_store: + path: data/online_store.db + type: sqlite + offline_store: + type: file + """ + ) + ) + + (repo_path / "foo").mkdir() + repo_example = repo_path / "foo/provider.py" + repo_example.write_text( + (Path(__file__).parents[2] / "foo_provider.py").read_text() + ) + + yield repo_path + + +@contextmanager +def setup_third_party_registry_store_repo(registry_store: str): + with tempfile.TemporaryDirectory() as repo_dir_name: + + # Construct an example repo in a temporary dir + 
repo_path = Path(repo_dir_name) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text( + dedent( + f""" + project: foo + registry: + registry_store_type: {registry_store} + path: foobar://foo.bar + provider: local + online_store: + path: data/online_store.db + type: sqlite + offline_store: + type: file + """ + ) + ) + + (repo_path / "foo").mkdir() + repo_example = repo_path / "foo/registry_store.py" + repo_example.write_text( + (Path(__file__).parents[2] / "foo_registry_store.py").read_text() + ) + + yield repo_path diff --git a/sdk/python/tests/integration/registration/test_cli_apply_duplicates.py b/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py similarity index 98% rename from sdk/python/tests/integration/registration/test_cli_apply_duplicates.py rename to sdk/python/tests/unit/cli/test_cli_apply_duplicates.py index bad3b50a80..f61a46516e 100644 --- a/sdk/python/tests/integration/registration/test_cli_apply_duplicates.py +++ b/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py @@ -2,7 +2,7 @@ from pathlib import Path from textwrap import dedent -from tests.utils.cli_utils import CliRunner, get_example_repo +from tests.utils.cli_repo_creator import CliRunner, get_example_repo def test_cli_apply_duplicated_featureview_names() -> None: diff --git a/sdk/python/tests/integration/registration/test_cli_chdir.py b/sdk/python/tests/unit/cli/test_cli_chdir.py similarity index 97% rename from sdk/python/tests/integration/registration/test_cli_chdir.py rename to sdk/python/tests/unit/cli/test_cli_chdir.py index ff26c2f5e2..8260a95efd 100644 --- a/sdk/python/tests/integration/registration/test_cli_chdir.py +++ b/sdk/python/tests/unit/cli/test_cli_chdir.py @@ -2,7 +2,7 @@ from datetime import datetime, timedelta from pathlib import Path -from tests.utils.cli_utils import CliRunner +from tests.utils.cli_repo_creator import CliRunner def test_cli_chdir() -> None: diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py 
b/sdk/python/tests/unit/diff/test_registry_diff.py index d12fc717f0..0effdfba97 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -4,7 +4,7 @@ ) from feast.entity import Entity from feast.feature_view import FeatureView -from tests.utils.data_source_utils import prep_file_source +from tests.utils.data_source_test_creator import prep_file_source def test_tag_objects_for_keep_delete_update_add(simple_dataset_1): diff --git a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py index 07e22017b5..c8eca6201f 100644 --- a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py +++ b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py @@ -14,10 +14,10 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import RepoConfig -from tests.utils.online_store_utils import ( - _create_n_customer_test_samples, - _create_test_table, - _insert_data_test_table, +from tests.utils.dynamo_table_creator import ( + create_n_customer_test_samples, + create_test_table, + insert_data_test_table, ) REGISTRY = "s3://test_registry/registry.db" @@ -165,9 +165,9 @@ def test_dynamodb_online_store_online_read( ): """Test DynamoDBOnlineStore online_read method.""" db_table_name = f"{TABLE_NAME}_online_read_{n_samples}" - _create_test_table(PROJECT, db_table_name, REGION) - data = _create_n_customer_test_samples(n=n_samples) - _insert_data_test_table(data, PROJECT, db_table_name, REGION) + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) entity_keys, features, *rest = zip(*data) returned_items = dynamodb_online_store.online_read( @@ -186,8 +186,8 @@ def 
test_dynamodb_online_store_online_write_batch( ): """Test DynamoDBOnlineStore online_write_batch method.""" db_table_name = f"{TABLE_NAME}_online_write_batch_{n_samples}" - _create_test_table(PROJECT, db_table_name, REGION) - data = _create_n_customer_test_samples() + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples() entity_keys, features, *rest = zip(*data) dynamodb_online_store.online_write_batch( @@ -211,10 +211,10 @@ def test_dynamodb_online_store_update(repo_config, dynamodb_online_store): """Test DynamoDBOnlineStore update method.""" # create dummy table to keep db_table_keep_name = f"{TABLE_NAME}_keep_update" - _create_test_table(PROJECT, db_table_keep_name, REGION) + create_test_table(PROJECT, db_table_keep_name, REGION) # create dummy table to delete db_table_delete_name = f"{TABLE_NAME}_delete_update" - _create_test_table(PROJECT, db_table_delete_name, REGION) + create_test_table(PROJECT, db_table_delete_name, REGION) dynamodb_online_store.update( config=repo_config, @@ -240,8 +240,8 @@ def test_dynamodb_online_store_teardown(repo_config, dynamodb_online_store): """Test DynamoDBOnlineStore teardown method.""" db_table_delete_name_one = f"{TABLE_NAME}_delete_teardown_1" db_table_delete_name_two = f"{TABLE_NAME}_delete_teardown_2" - _create_test_table(PROJECT, db_table_delete_name_one, REGION) - _create_test_table(PROJECT, db_table_delete_name_two, REGION) + create_test_table(PROJECT, db_table_delete_name_one, REGION) + create_test_table(PROJECT, db_table_delete_name_two, REGION) dynamodb_online_store.teardown( config=repo_config, @@ -267,9 +267,9 @@ def test_dynamodb_online_store_online_read_unknown_entity( ): """Test DynamoDBOnlineStore online_read method.""" n_samples = 2 - _create_test_table(PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION) - data = _create_n_customer_test_samples(n=n_samples) - _insert_data_test_table( + create_test_table(PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION) + 
data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table( data, PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION ) @@ -304,8 +304,8 @@ def test_dynamodb_online_store_online_read_unknown_entity( def test_write_batch_non_duplicates(repo_config, dynamodb_online_store): """Test DynamoDBOnline Store deduplicate write batch request items.""" dynamodb_tbl = f"{TABLE_NAME}_batch_non_duplicates" - _create_test_table(PROJECT, dynamodb_tbl, REGION) - data = _create_n_customer_test_samples() + create_test_table(PROJECT, dynamodb_tbl, REGION) + data = create_n_customer_test_samples() data_duplicate = deepcopy(data) dynamodb_resource = boto3.resource("dynamodb", region_name=REGION) table_instance = dynamodb_resource.Table(f"{PROJECT}.{dynamodb_tbl}") @@ -330,9 +330,9 @@ def test_dynamodb_online_store_online_read_unknown_entity_end_of_batch( """ batch_size = repo_config.online_store.batch_size n_samples = batch_size - _create_test_table(PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION) - data = _create_n_customer_test_samples(n=n_samples) - _insert_data_test_table( + create_test_table(PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table( data, PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION ) diff --git a/sdk/python/tests/integration/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py similarity index 100% rename from sdk/python/tests/integration/scaffolding/test_repo_config.py rename to sdk/python/tests/unit/infra/scaffolding/test_repo_config.py diff --git a/sdk/python/tests/integration/scaffolding/test_repo_operations.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_operations.py similarity index 100% rename from sdk/python/tests/integration/scaffolding/test_repo_operations.py rename to sdk/python/tests/unit/infra/scaffolding/test_repo_operations.py diff --git 
a/sdk/python/tests/unit/infra/test_inference_unit_tests.py b/sdk/python/tests/unit/infra/test_inference_unit_tests.py new file mode 100644 index 0000000000..7a564679d6 --- /dev/null +++ b/sdk/python/tests/unit/infra/test_inference_unit_tests.py @@ -0,0 +1,382 @@ +import pandas as pd +import pytest + +from feast import BigQuerySource, FileSource, RedshiftSource, SnowflakeSource +from feast.data_source import RequestSource +from feast.entity import Entity +from feast.errors import DataSourceNoNameException, SpecifiedFeaturesNotPresentError +from feast.feature import Feature +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.field import Field +from feast.inference import update_feature_views_with_inferred_features_and_entities +from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( + SparkSource, +) +from feast.on_demand_feature_view import on_demand_feature_view +from feast.repo_config import RepoConfig +from feast.types import Float32, Float64, Int64, String, UnixTimestamp, ValueType +from tests.utils.data_source_test_creator import prep_file_source + + +def test_infer_datasource_names_file(): + file_path = "path/to/test.csv" + data_source = FileSource(path=file_path) + assert data_source.name == file_path + + source_name = "my_name" + data_source = FileSource(name=source_name, path=file_path) + assert data_source.name == source_name + + +def test_infer_datasource_names_dwh(): + table = "project.table" + dwh_classes = [BigQuerySource, RedshiftSource, SnowflakeSource, SparkSource] + + for dwh_class in dwh_classes: + data_source = dwh_class(table=table) + assert data_source.name == table + + source_name = "my_name" + data_source_with_table = dwh_class(name=source_name, table=table) + assert data_source_with_table.name == source_name + data_source_with_query = dwh_class( + name=source_name, query=f"SELECT * from {table}" + ) + assert data_source_with_query.name == source_name + + # If we 
have a query and no name, throw an error + if dwh_class == SparkSource: + with pytest.raises(DataSourceNoNameException): + print(f"Testing dwh {dwh_class}") + data_source = dwh_class(query="test_query") + else: + data_source = dwh_class(query="test_query") + assert data_source.name == "" + + +def test_on_demand_features_type_inference(): + # Create Feature Views + date_request = RequestSource( + name="date_request", + schema=[Field(name="some_date", dtype=UnixTimestamp)], + ) + + @on_demand_feature_view( + sources=[date_request], + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="string_output", dtype=String), + ], + ) + def test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) + return data + + test_view.infer_features() + + @on_demand_feature_view( + # Note: we deliberately use `inputs` instead of `sources` to test that `inputs` + # still works correctly, even though it is deprecated. + # TODO(felixwang9817): Remove references to `inputs` once it is fully deprecated. + inputs={"date_request": date_request}, + features=[ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="object_output", dtype=ValueType.STRING), + ], + ) + def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["object_output"] = features_df["some_date"].astype(str) + return data + + with pytest.raises(ValueError, match="Value with native type object"): + invalid_test_view.infer_features() + + @on_demand_feature_view( + # Note: we deliberately use positional arguments here to test that they work correctly, + # even though positional arguments are deprecated in favor of keyword arguments. + # TODO(felixwang9817): Remove positional arguments once they are fully deprecated. 
+ [ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="missing", dtype=ValueType.STRING), + ], + {"date_request": date_request}, + ) + def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + return data + + with pytest.raises(SpecifiedFeaturesNotPresentError): + test_view_with_missing_feature.infer_features() + + +# TODO(kevjumba): remove this in feast 0.24 when deprecating +@pytest.mark.parametrize( + "request_source_schema", + [ + [Field(name="some_date", dtype=UnixTimestamp)], + {"some_date": ValueType.UNIX_TIMESTAMP}, + ], +) +def test_datasource_inference(request_source_schema): + # Create Feature Views + date_request = RequestSource( + name="date_request", + schema=request_source_schema, + ) + + @on_demand_feature_view( + # Note: we deliberately use positional arguments here to test that they work correctly, + # even though positional arguments are deprecated in favor of keyword arguments. + # TODO(felixwang9817): Remove positional arguments once they are fully deprecated. 
+ [ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="string_output", dtype=ValueType.STRING), + ], + sources=[date_request], + ) + def test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) + return data + + test_view.infer_features() + + @on_demand_feature_view( + sources=[date_request], + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="object_output", dtype=String), + ], + ) + def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["object_output"] = features_df["some_date"].astype(str) + return data + + with pytest.raises(ValueError, match="Value with native type object"): + invalid_test_view.infer_features() + + @on_demand_feature_view( + sources=[date_request], + features=[ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="missing", dtype=ValueType.STRING), + ], + ) + def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + return data + + with pytest.raises(SpecifiedFeaturesNotPresentError): + test_view_with_missing_feature.infer_features() + + +def test_feature_view_inference_respects_basic_inference(): + """ + Tests that feature view inference respects the basic inference that occurs during creation. 
+ """ + file_source = FileSource(name="test", path="test path") + entity1 = Entity(name="test1", join_keys=["test_column_1"]) + entity2 = Entity(name="test2", join_keys=["test_column_2"]) + feature_view_1 = FeatureView( + name="test1", + entities=[entity1], + schema=[ + Field(name="feature", dtype=Float32), + Field(name="test_column_1", dtype=String), + ], + source=file_source, + ) + feature_view_2 = FeatureView( + name="test2", + entities=[entity1, entity2], + schema=[ + Field(name="feature", dtype=Float32), + Field(name="test_column_1", dtype=String), + Field(name="test_column_2", dtype=String), + ], + source=file_source, + ) + + assert len(feature_view_1.schema) == 2 + assert len(feature_view_1.features) == 1 + assert len(feature_view_1.entity_columns) == 1 + + update_feature_views_with_inferred_features_and_entities( + [feature_view_1], [entity1], RepoConfig(provider="local", project="test") + ) + assert len(feature_view_1.schema) == 2 + assert len(feature_view_1.features) == 1 + assert len(feature_view_1.entity_columns) == 1 + + assert len(feature_view_2.schema) == 3 + assert len(feature_view_2.features) == 1 + assert len(feature_view_2.entity_columns) == 2 + + update_feature_views_with_inferred_features_and_entities( + [feature_view_2], + [entity1, entity2], + RepoConfig(provider="local", project="test"), + ) + assert len(feature_view_2.schema) == 3 + assert len(feature_view_2.features) == 1 + assert len(feature_view_2.entity_columns) == 2 + + +def test_feature_view_inference_on_entity_columns(simple_dataset_1): + """ + Tests that feature view inference correctly infers entity columns. 
+ """ + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + entity1 = Entity(name="test1", join_keys=["id_join_key"]) + feature_view_1 = FeatureView( + name="test1", + entities=[entity1], + schema=[Field(name="int64_col", dtype=Int64)], + source=file_source, + ) + + assert len(feature_view_1.schema) == 1 + assert len(feature_view_1.features) == 1 + assert len(feature_view_1.entity_columns) == 0 + + update_feature_views_with_inferred_features_and_entities( + [feature_view_1], [entity1], RepoConfig(provider="local", project="test") + ) + + # The schema is only used as a parameter, as is therefore not updated during inference. + assert len(feature_view_1.schema) == 1 + + # Since there is already a feature specified, additional features are not inferred. + assert len(feature_view_1.features) == 1 + + # The single entity column is inferred correctly. + assert len(feature_view_1.entity_columns) == 1 + + +def test_feature_view_inference_respects_entity_value_type(simple_dataset_1): + """ + Tests that feature view inference still respects an entity's value type. + """ + # TODO(felixwang9817): Remove this test once entity value_type is removed. + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + entity1 = Entity( + name="test1", join_keys=["id_join_key"], value_type=ValueType.STRING + ) + feature_view_1 = FeatureView( + name="test1", + entities=[entity1], + schema=[Field(name="int64_col", dtype=Int64)], + source=file_source, + ) + + assert len(feature_view_1.schema) == 1 + assert len(feature_view_1.features) == 1 + assert len(feature_view_1.entity_columns) == 0 + + update_feature_views_with_inferred_features_and_entities( + [feature_view_1], [entity1], RepoConfig(provider="local", project="test") + ) + + # The schema is only used as a parameter, as is therefore not updated during inference. 
+ assert len(feature_view_1.schema) == 1 + + # Since there is already a feature specified, additional features are not inferred. + assert len(feature_view_1.features) == 1 + + # The single entity column is inferred correctly and has type String. + assert len(feature_view_1.entity_columns) == 1 + assert feature_view_1.entity_columns[0].dtype == String + + +def test_feature_view_inference_on_feature_columns(simple_dataset_1): + """ + Tests that feature view inference correctly infers feature columns. + """ + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + entity1 = Entity(name="test1", join_keys=["id_join_key"]) + feature_view_1 = FeatureView( + name="test1", + entities=[entity1], + schema=[Field(name="id_join_key", dtype=Int64)], + source=file_source, + ) + + assert len(feature_view_1.schema) == 1 + assert len(feature_view_1.features) == 0 + assert len(feature_view_1.entity_columns) == 1 + + update_feature_views_with_inferred_features_and_entities( + [feature_view_1], [entity1], RepoConfig(provider="local", project="test") + ) + + # The schema is only used as a parameter, as is therefore not updated during inference. + assert len(feature_view_1.schema) == 1 + + # All three feature columns are inferred correctly. + assert len(feature_view_1.features) == 3 + print(feature_view_1.features) + feature_column_1 = Field(name="float_col", dtype=Float64) + feature_column_2 = Field(name="int64_col", dtype=Int64) + feature_column_3 = Field(name="string_col", dtype=String) + assert feature_column_1 in feature_view_1.features + assert feature_column_2 in feature_view_1.features + assert feature_column_3 in feature_view_1.features + + # The single entity column remains. 
+ assert len(feature_view_1.entity_columns) == 1 + + +def test_update_feature_services_with_inferred_features(simple_dataset_1): + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + entity1 = Entity(name="test1", join_keys=["id_join_key"]) + feature_view_1 = FeatureView( + name="test1", + entities=[entity1], + source=file_source, + ) + feature_view_2 = FeatureView( + name="test2", + entities=[entity1], + source=file_source, + ) + + feature_service = FeatureService( + name="fs_1", features=[feature_view_1[["string_col"]], feature_view_2] + ) + assert len(feature_service.feature_view_projections) == 2 + assert len(feature_service.feature_view_projections[0].features) == 0 + assert len(feature_service.feature_view_projections[0].desired_features) == 1 + assert len(feature_service.feature_view_projections[1].features) == 0 + assert len(feature_service.feature_view_projections[1].desired_features) == 0 + + update_feature_views_with_inferred_features_and_entities( + [feature_view_1, feature_view_2], + [entity1], + RepoConfig(provider="local", project="test"), + ) + feature_service.infer_features( + fvs_to_update={ + feature_view_1.name: feature_view_1, + feature_view_2.name: feature_view_2, + } + ) + + assert len(feature_view_1.schema) == 0 + assert len(feature_view_1.features) == 3 + assert len(feature_view_2.schema) == 0 + assert len(feature_view_2.features) == 3 + assert len(feature_service.feature_view_projections[0].features) == 1 + assert len(feature_service.feature_view_projections[1].features) == 3 + + +# TODO(felixwang9817): Add tests that interact with field mapping. 
diff --git a/sdk/python/tests/unit/infra/test_local_registry.py b/sdk/python/tests/unit/infra/test_local_registry.py new file mode 100644 index 0000000000..d69ae6aafd --- /dev/null +++ b/sdk/python/tests/unit/infra/test_local_registry.py @@ -0,0 +1,535 @@ +# Copyright 2022 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from datetime import timedelta +from tempfile import mkstemp + +import pandas as pd +import pytest +from pytest_lazyfixture import lazy_fixture + +from feast import FileSource +from feast.aggregation import Aggregation +from feast.data_format import AvroFormat, ParquetFormat +from feast.data_source import KafkaSource +from feast.entity import Entity +from feast.feature import Feature +from feast.feature_view import FeatureView +from feast.field import Field +from feast.on_demand_feature_view import RequestSource, on_demand_feature_view +from feast.registry import Registry +from feast.repo_config import RegistryConfig +from feast.stream_feature_view import StreamFeatureView +from feast.types import Array, Bytes, Float32, Int32, Int64, String +from feast.value_type import ValueType +from tests.utils.e2e_test_validation import validate_registry_data_source_apply + + +@pytest.fixture +def local_registry() -> Registry: + fd, registry_path = mkstemp() + registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600) + return Registry(registry_config, None) + + +@pytest.mark.parametrize( + "test_registry", + 
[lazy_fixture("local_registry")], +) +def test_apply_entity_success(test_registry): + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + project = "project" + + # Register Entity + test_registry.apply_entity(entity, project) + + entities = test_registry.list_entities(project) + + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + entity = test_registry.get_entity("driver_car_id", project) + assert ( + entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + test_registry.delete_entity("driver_car_id", project) + entities = test_registry.list_entities(project) + assert len(entities) == 0 + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto(project=project) + + +@pytest.mark.parametrize( + "test_registry", + [lazy_fixture("local_registry")], +) +def test_apply_feature_view_success(test_registry): + # Create Feature Views + batch_source = FileSource( + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) + + fv1 = FeatureView( + name="my_feature_view_1", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + ], + entities=[entity], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + project = "project" + + # Register Feature View + test_registry.apply_feature_view(fv1, project) + + 
feature_views = test_registry.list_feature_views(project) + + # List Feature Views + assert ( + len(feature_views) == 1 + and feature_views[0].name == "my_feature_view_1" + and feature_views[0].features[0].name == "fs1_my_feature_1" + and feature_views[0].features[0].dtype == Int64 + and feature_views[0].features[1].name == "fs1_my_feature_2" + and feature_views[0].features[1].dtype == String + and feature_views[0].features[2].name == "fs1_my_feature_3" + and feature_views[0].features[2].dtype == Array(String) + and feature_views[0].features[3].name == "fs1_my_feature_4" + and feature_views[0].features[3].dtype == Array(Bytes) + and feature_views[0].entities[0] == "fs1_my_entity_1" + ) + + feature_view = test_registry.get_feature_view("my_feature_view_1", project) + assert ( + feature_view.name == "my_feature_view_1" + and feature_view.features[0].name == "fs1_my_feature_1" + and feature_view.features[0].dtype == Int64 + and feature_view.features[1].name == "fs1_my_feature_2" + and feature_view.features[1].dtype == String + and feature_view.features[2].name == "fs1_my_feature_3" + and feature_view.features[2].dtype == Array(String) + and feature_view.features[3].name == "fs1_my_feature_4" + and feature_view.features[3].dtype == Array(Bytes) + and feature_view.entities[0] == "fs1_my_entity_1" + ) + + test_registry.delete_feature_view("my_feature_view_1", project) + feature_views = test_registry.list_feature_views(project) + assert len(feature_views) == 0 + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto(project=project) + + +@pytest.mark.parametrize( + "test_registry", + [lazy_fixture("local_registry")], +) +def test_apply_on_demand_feature_view_success(test_registry): + # Create Feature Views + driver_stats = FileSource( + name="driver_stats_source", + path="data/driver_stats_lat_lon.parquet", + 
timestamp_field="event_timestamp", + created_timestamp_column="created", + description="A table describing the stats of a driver based on hourly logs", + owner="test2@gmail.com", + ) + + driver_daily_features_view = FeatureView( + name="driver_daily_features", + entities=["driver"], + ttl=timedelta(seconds=8640000000), + schema=[ + Field(name="daily_miles_driven", dtype=Float32), + Field(name="lat", dtype=Float32), + Field(name="lon", dtype=Float32), + Field(name="string_feature", dtype=String), + ], + online=True, + source=driver_stats, + tags={"production": "True"}, + owner="test2@gmail.com", + ) + + @on_demand_feature_view( + sources=[driver_daily_features_view], + schema=[Field(name="first_char", dtype=String)], + ) + def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["first_char"] = inputs["string_feature"].str[:1].astype("string") + return df + + project = "project" + + # Register Feature View + test_registry.apply_feature_view(location_features_from_push, project) + + feature_views = test_registry.list_on_demand_feature_views(project) + + # List Feature Views + assert ( + len(feature_views) == 1 + and feature_views[0].name == "location_features_from_push" + and feature_views[0].features[0].name == "first_char" + and feature_views[0].features[0].dtype == String + ) + + feature_view = test_registry.get_on_demand_feature_view( + "location_features_from_push", project + ) + assert ( + feature_view.name == "location_features_from_push" + and feature_view.features[0].name == "first_char" + and feature_view.features[0].dtype == String + ) + + test_registry.delete_feature_view("location_features_from_push", project) + feature_views = test_registry.list_on_demand_feature_views(project) + assert len(feature_views) == 0 + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + 
test_registry._get_registry_proto(project=project) + + +@pytest.mark.parametrize( + "test_registry", + [lazy_fixture("local_registry")], +) +def test_apply_stream_feature_view_success(test_registry): + # Create Feature Views + def simple_udf(x: int): + return x + 3 + + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + watermark_delay_threshold=timedelta(days=1), + ) + + sfv = StreamFeatureView( + name="test kafka stream feature view", + entities=[entity], + ttl=timedelta(days=30), + owner="test@example.com", + online=True, + schema=[Field(name="dummy_field", dtype=Float32)], + description="desc", + aggregations=[ + Aggregation( + column="dummy_field", + function="max", + time_window=timedelta(days=1), + ), + Aggregation( + column="dummy_field2", + function="count", + time_window=timedelta(days=24), + ), + ], + timestamp_field="event_timestamp", + mode="spark", + source=stream_source, + udf=simple_udf, + tags={}, + ) + + project = "project" + + # Register Feature View + test_registry.apply_feature_view(sfv, project) + + stream_feature_views = test_registry.list_stream_feature_views(project) + + # List Feature Views + assert len(stream_feature_views) == 1 + assert stream_feature_views[0] == sfv + + test_registry.delete_feature_view("test kafka stream feature view", project) + stream_feature_views = test_registry.list_stream_feature_views(project) + assert len(stream_feature_views) == 0 + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto(project=project) + + +@pytest.mark.parametrize( + "test_registry", + [lazy_fixture("local_registry")], +) +# TODO(kevjumba): remove this in feast 0.24 when deprecating 
+@pytest.mark.parametrize( + "request_source_schema", + [[Field(name="my_input_1", dtype=Int32)], {"my_input_1": ValueType.INT32}], +) +def test_modify_feature_views_success(test_registry, request_source_schema): + # Create Feature Views + batch_source = FileSource( + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + request_source = RequestSource( + name="request_source", + schema=request_source_schema, + ) + + entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) + + fv1 = FeatureView( + name="my_feature_view_1", + schema=[Field(name="fs1_my_feature_1", dtype=Int64)], + entities=[entity], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + @on_demand_feature_view( + features=[ + Feature(name="odfv1_my_feature_1", dtype=ValueType.STRING), + Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), + ], + sources=[request_source], + ) + def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["odfv1_my_feature_1"] = feature_df["my_input_1"].astype("category") + data["odfv1_my_feature_2"] = feature_df["my_input_1"].astype("int32") + return data + + project = "project" + + # Register Feature Views + test_registry.apply_feature_view(odfv1, project) + test_registry.apply_feature_view(fv1, project) + + # Modify odfv by changing a single feature dtype + @on_demand_feature_view( + features=[ + Feature(name="odfv1_my_feature_1", dtype=ValueType.FLOAT), + Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), + ], + sources=[request_source], + ) + def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["odfv1_my_feature_1"] = feature_df["my_input_1"].astype("float") + data["odfv1_my_feature_2"] = feature_df["my_input_1"].astype("int32") + return data + + # Apply the modified odfv + test_registry.apply_feature_view(odfv1, project) + + # Check odfv + on_demand_feature_views = 
test_registry.list_on_demand_feature_views(project) + + assert ( + len(on_demand_feature_views) == 1 + and on_demand_feature_views[0].name == "odfv1" + and on_demand_feature_views[0].features[0].name == "odfv1_my_feature_1" + and on_demand_feature_views[0].features[0].dtype == Float32 + and on_demand_feature_views[0].features[1].name == "odfv1_my_feature_2" + and on_demand_feature_views[0].features[1].dtype == Int32 + ) + request_schema = on_demand_feature_views[0].get_request_data_schema() + assert ( + list(request_schema.keys())[0] == "my_input_1" + and list(request_schema.values())[0] == ValueType.INT32 + ) + + feature_view = test_registry.get_on_demand_feature_view("odfv1", project) + assert ( + feature_view.name == "odfv1" + and feature_view.features[0].name == "odfv1_my_feature_1" + and feature_view.features[0].dtype == Float32 + and feature_view.features[1].name == "odfv1_my_feature_2" + and feature_view.features[1].dtype == Int32 + ) + request_schema = feature_view.get_request_data_schema() + assert ( + list(request_schema.keys())[0] == "my_input_1" + and list(request_schema.values())[0] == ValueType.INT32 + ) + + # Make sure fv1 is untouched + feature_views = test_registry.list_feature_views(project) + + # List Feature Views + assert ( + len(feature_views) == 1 + and feature_views[0].name == "my_feature_view_1" + and feature_views[0].features[0].name == "fs1_my_feature_1" + and feature_views[0].features[0].dtype == Int64 + and feature_views[0].entities[0] == "fs1_my_entity_1" + ) + + feature_view = test_registry.get_feature_view("my_feature_view_1", project) + assert ( + feature_view.name == "my_feature_view_1" + and feature_view.features[0].name == "fs1_my_feature_1" + and feature_view.features[0].dtype == Int64 + and feature_view.entities[0] == "fs1_my_entity_1" + ) + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + 
test_registry._get_registry_proto(project=project) + + +@pytest.mark.parametrize( + "test_registry", + [lazy_fixture("local_registry")], +) +def test_apply_data_source(test_registry: Registry): + validate_registry_data_source_apply(test_registry) + + +def test_commit(): + fd, registry_path = mkstemp() + registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600) + test_registry = Registry(registry_config, None) + + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + project = "project" + + # Register Entity without commiting + test_registry.apply_entity(entity, project, commit=False) + assert test_registry.cached_registry_proto + assert len(test_registry.cached_registry_proto.project_metadata) == 1 + project_metadata = test_registry.cached_registry_proto.project_metadata[0] + project_uuid = project_metadata.project_uuid + assert len(project_uuid) == 36 + validate_project_uuid(project_uuid, test_registry) + + # Retrieving the entity should still succeed + entities = test_registry.list_entities(project, allow_cache=True) + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + validate_project_uuid(project_uuid, test_registry) + + entity = test_registry.get_entity("driver_car_id", project, allow_cache=True) + assert ( + entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + validate_project_uuid(project_uuid, test_registry) + + # Create new registry that points to the same store + registry_with_same_store = Registry(registry_config, None) + + # Retrieving the entity should fail since the store is empty + entities = registry_with_same_store.list_entities(project) + assert len(entities) == 0 + validate_project_uuid(project_uuid, 
registry_with_same_store) + + # commit from the original registry + test_registry.commit() + + # Reconstruct the new registry in order to read the newly written store + registry_with_same_store = Registry(registry_config, None) + + # Retrieving the entity should now succeed + entities = registry_with_same_store.list_entities(project) + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + validate_project_uuid(project_uuid, registry_with_same_store) + + entity = test_registry.get_entity("driver_car_id", project) + assert ( + entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto(project=project) + + +def validate_project_uuid(project_uuid, test_registry): + assert len(test_registry.cached_registry_proto.project_metadata) == 1 + project_metadata = test_registry.cached_registry_proto.project_metadata[0] + assert project_metadata.project_uuid == project_uuid diff --git a/sdk/python/tests/integration/online_store/test_e2e_local.py b/sdk/python/tests/unit/local_feast_tests/test_e2e_local.py similarity index 53% rename from sdk/python/tests/integration/online_store/test_e2e_local.py rename to sdk/python/tests/unit/local_feast_tests/test_e2e_local.py index 34758a50d0..5fbedf944d 100644 --- a/sdk/python/tests/integration/online_store/test_e2e_local.py +++ b/sdk/python/tests/unit/local_feast_tests/test_e2e_local.py @@ -4,143 +4,14 @@ from pathlib import Path import pandas as pd -from pytz import utc from feast.driver_test_data import ( create_driver_hourly_stats_df, create_global_daily_stats_df, ) from feast.feature_store import 
FeatureStore -from tests.utils.cli_utils import CliRunner, get_example_repo - - -def _get_last_feature_row(df: pd.DataFrame, driver_id, max_date: datetime): - """Manually extract last feature value from a dataframe for a given driver_id with up to `max_date` date""" - filtered = df[ - (df["driver_id"] == driver_id) - & (df["event_timestamp"] < max_date.replace(tzinfo=utc)) - ] - max_ts = filtered.loc[filtered["event_timestamp"].idxmax()]["event_timestamp"] - filtered_by_ts = filtered[filtered["event_timestamp"] == max_ts] - return filtered_by_ts.loc[filtered_by_ts["created"].idxmax()] - - -def _assert_online_features( - store: FeatureStore, driver_df: pd.DataFrame, max_date: datetime -): - """Assert that features in online store are up to date with `max_date` date.""" - # Read features back - response = store.get_online_features( - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:avg_daily_trips", - "global_daily_stats:num_rides", - "global_daily_stats:avg_ride_length", - ], - entity_rows=[{"driver_id": 1001}], - full_feature_names=True, - ) - - # Float features should still be floats. - assert ( - response.proto.results[ - list(response.proto.metadata.feature_names.val).index( - "driver_hourly_stats__conv_rate" - ) - ] - .values[0] - .float_val - > 0 - ), response.to_dict() - - result = response.to_dict() - assert len(result) == 5 - assert "driver_hourly_stats__avg_daily_trips" in result - assert "driver_hourly_stats__conv_rate" in result - assert ( - abs( - result["driver_hourly_stats__conv_rate"][0] - - _get_last_feature_row(driver_df, 1001, max_date)["conv_rate"] - ) - < 0.01 - ) - assert "global_daily_stats__num_rides" in result - assert "global_daily_stats__avg_ride_length" in result - - # Test the ODFV if it exists. 
- odfvs = store.list_on_demand_feature_views() - if odfvs and odfvs[0].name == "conv_rate_plus_100": - response = store.get_online_features( - features=[ - "conv_rate_plus_100:conv_rate_plus_100", - "conv_rate_plus_100:conv_rate_plus_val_to_add", - ], - entity_rows=[{"driver_id": 1001, "val_to_add": 100}], - full_feature_names=True, - ) - - # Check that float64 feature is stored correctly in proto format. - assert ( - response.proto.results[ - list(response.proto.metadata.feature_names.val).index( - "conv_rate_plus_100__conv_rate_plus_100" - ) - ] - .values[0] - .double_val - > 0 - ) - - result = response.to_dict() - assert len(result) == 3 - assert "conv_rate_plus_100__conv_rate_plus_100" in result - assert "conv_rate_plus_100__conv_rate_plus_val_to_add" in result - assert ( - abs( - result["conv_rate_plus_100__conv_rate_plus_100"][0] - - (_get_last_feature_row(driver_df, 1001, max_date)["conv_rate"] + 100) - ) - < 0.01 - ) - assert ( - abs( - result["conv_rate_plus_100__conv_rate_plus_val_to_add"][0] - - (_get_last_feature_row(driver_df, 1001, max_date)["conv_rate"] + 100) - ) - < 0.01 - ) - - -def _test_materialize_and_online_retrieval( - runner: CliRunner, - store: FeatureStore, - start_date: datetime, - end_date: datetime, - driver_df: pd.DataFrame, -): - assert store.repo_path is not None - - # Test `feast materialize` and online retrieval. - r = runner.run( - [ - "materialize", - start_date.isoformat(), - (end_date - timedelta(days=7)).isoformat(), - ], - cwd=Path(store.repo_path), - ) - - assert r.returncode == 0, f"stdout: {r.stdout}\n stderr: {r.stderr}" - _assert_online_features(store, driver_df, end_date - timedelta(days=7)) - - # Test `feast materialize-incremental` and online retrieval. 
- r = runner.run( - ["materialize-incremental", end_date.isoformat()], - cwd=Path(store.repo_path), - ) - - assert r.returncode == 0, f"stdout: {r.stdout}\n stderr: {r.stderr}" - _assert_online_features(store, driver_df, end_date) +from tests.utils.cli_repo_creator import CliRunner, get_example_repo +from tests.utils.feature_records import validate_online_features def test_e2e_local() -> None: @@ -217,3 +88,35 @@ def test_e2e_local() -> None: assert returncode != 0 assert "feast.errors.FeastJoinKeysDuringMaterialization" in str(output) + + +def _test_materialize_and_online_retrieval( + runner: CliRunner, + store: FeatureStore, + start_date: datetime, + end_date: datetime, + driver_df: pd.DataFrame, +): + assert store.repo_path is not None + + # Test `feast materialize` and online retrieval. + r = runner.run( + [ + "materialize", + start_date.isoformat(), + (end_date - timedelta(days=7)).isoformat(), + ], + cwd=Path(store.repo_path), + ) + + assert r.returncode == 0, f"stdout: {r.stdout}\n stderr: {r.stderr}" + validate_online_features(store, driver_df, end_date - timedelta(days=7)) + + # Test `feast materialize-incremental` and online retrieval. 
+ r = runner.run( + ["materialize-incremental", end_date.isoformat()], + cwd=Path(store.repo_path), + ) + + assert r.returncode == 0, f"stdout: {r.stdout}\n stderr: {r.stderr}" + validate_online_features(store, driver_df, end_date) diff --git a/sdk/python/tests/integration/online_store/test_feature_service_read.py b/sdk/python/tests/unit/local_feast_tests/test_feature_service_read.py similarity index 78% rename from sdk/python/tests/integration/online_store/test_feature_service_read.py rename to sdk/python/tests/unit/local_feast_tests/test_feature_service_read.py index 33c318b9ed..72392b0396 100644 --- a/sdk/python/tests/integration/online_store/test_feature_service_read.py +++ b/sdk/python/tests/unit/local_feast_tests/test_feature_service_read.py @@ -1,7 +1,7 @@ import pytest -from tests.utils.cli_utils import CliRunner, get_example_repo -from tests.utils.online_read_write_test import basic_rw_test +from tests.utils.basic_read_write_test import basic_rw_test +from tests.utils.cli_repo_creator import CliRunner, get_example_repo @pytest.mark.integration diff --git a/sdk/python/tests/integration/scaffolding/test_init.py b/sdk/python/tests/unit/local_feast_tests/test_init.py similarity index 97% rename from sdk/python/tests/integration/scaffolding/test_init.py rename to sdk/python/tests/unit/local_feast_tests/test_init.py index 1cada91ea0..f9bf536e56 100644 --- a/sdk/python/tests/integration/scaffolding/test_init.py +++ b/sdk/python/tests/unit/local_feast_tests/test_init.py @@ -3,7 +3,7 @@ from pathlib import Path from textwrap import dedent -from tests.utils.cli_utils import CliRunner +from tests.utils.cli_repo_creator import CliRunner def test_repo_init() -> None: diff --git a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py new file mode 100644 index 0000000000..44a35e0660 --- /dev/null +++ b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py @@ -0,0 +1,266 @@ 
+from datetime import datetime, timedelta +from tempfile import mkstemp + +import pytest +from pytest_lazyfixture import lazy_fixture + +from feast import FileSource +from feast.data_format import ParquetFormat +from feast.entity import Entity +from feast.feature_store import FeatureStore +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.repo_config import RepoConfig +from feast.types import Array, Bytes, Int64, String +from tests.utils.data_source_test_creator import prep_file_source + + +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +def test_apply_entity_success(test_feature_store): + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + # Register Entity + test_feature_store.apply(entity) + + entities = test_feature_store.list_entities() + + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + test_feature_store.teardown() + + +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +def test_apply_feature_view_success(test_feature_store): + # Create Feature Views + batch_source = FileSource( + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + date_partition_column="date_partition_col", + ) + + entity = Entity(name="fs1_my_entity_1", join_keys=["entity_id"]) + + fv1 = FeatureView( + name="my_feature_view_1", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + Field(name="entity_id", dtype=Int64), 
+ ], + entities=[entity], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + # Register Feature View + test_feature_store.apply([entity, fv1]) + + feature_views = test_feature_store.list_feature_views() + + # List Feature Views + assert ( + len(feature_views) == 1 + and feature_views[0].name == "my_feature_view_1" + and feature_views[0].features[0].name == "fs1_my_feature_1" + and feature_views[0].features[0].dtype == Int64 + and feature_views[0].features[1].name == "fs1_my_feature_2" + and feature_views[0].features[1].dtype == String + and feature_views[0].features[2].name == "fs1_my_feature_3" + and feature_views[0].features[2].dtype == Array(String) + and feature_views[0].features[3].name == "fs1_my_feature_4" + and feature_views[0].features[3].dtype == Array(Bytes) + and feature_views[0].entities[0] == "fs1_my_entity_1" + ) + + test_feature_store.teardown() + + +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +def test_apply_object_and_read(test_feature_store): + assert isinstance(test_feature_store, FeatureStore) + # Create Feature Views + batch_source = FileSource( + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + e1 = Entity(name="fs1_my_entity_1", description="something") + + e2 = Entity(name="fs1_my_entity_2", description="something") + + fv1 = FeatureView( + name="my_feature_view_1", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + Field(name="fs1_my_entity_1", dtype=Int64), + ], + entities=[e1], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + fv2 = FeatureView( + name="my_feature_view_2", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + 
Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + Field(name="fs1_my_entity_2", dtype=Int64), + ], + entities=[e2], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + # Register Feature View + test_feature_store.apply([fv1, e1, fv2, e2]) + + fv1_actual = test_feature_store.get_feature_view("my_feature_view_1") + e1_actual = test_feature_store.get_entity("fs1_my_entity_1") + + assert e1 == e1_actual + assert fv2 != fv1_actual + assert e2 != e1_actual + + test_feature_store.teardown() + + +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +@pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) +def test_reapply_feature_view_success(test_feature_store, dataframe_source): + with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source: + + e = Entity(name="id", join_keys=["id_join_key"]) + + # Create Feature View + fv1 = FeatureView( + name="my_feature_view_1", + schema=[Field(name="string_col", dtype=String)], + entities=[e], + batch_source=file_source, + ttl=timedelta(minutes=5), + ) + + # Register Feature View + test_feature_store.apply([fv1, e]) + + # Check Feature View + fv_stored = test_feature_store.get_feature_view(fv1.name) + assert len(fv_stored.materialization_intervals) == 0 + + # Run materialization + test_feature_store.materialize(datetime(2020, 1, 1), datetime(2021, 1, 1)) + + # Check Feature View + fv_stored = test_feature_store.get_feature_view(fv1.name) + assert len(fv_stored.materialization_intervals) == 1 + + # Apply again + test_feature_store.apply([fv1]) + + # Check Feature View + fv_stored = test_feature_store.get_feature_view(fv1.name) + assert len(fv_stored.materialization_intervals) == 1 + + # Change and apply Feature View + fv1 = FeatureView( + name="my_feature_view_1", + 
schema=[Field(name="int64_col", dtype=Int64)], + entities=[e], + batch_source=file_source, + ttl=timedelta(minutes=5), + ) + test_feature_store.apply([fv1]) + + # Check Feature View + fv_stored = test_feature_store.get_feature_view(fv1.name) + assert len(fv_stored.materialization_intervals) == 0 + + test_feature_store.teardown() + + +def test_apply_conflicting_featureview_names(feature_store_with_local_registry): + """Test applying feature views with non-case-insensitively unique names""" + driver = Entity(name="driver", join_keys=["driver_id"]) + customer = Entity(name="customer", join_keys=["customer_id"]) + + driver_stats = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(seconds=10), + online=False, + batch_source=FileSource(path="driver_stats.parquet"), + tags={}, + ) + + customer_stats = FeatureView( + name="DRIVER_HOURLY_STATS", + entities=[customer], + ttl=timedelta(seconds=10), + online=False, + batch_source=FileSource(path="customer_stats.parquet"), + tags={}, + ) + try: + feature_store_with_local_registry.apply([driver_stats, customer_stats]) + error = None + except ValueError as e: + error = e + assert ( + isinstance(error, ValueError) + and "Please ensure that all feature view names are case-insensitively unique" + in error.args[0] + ) + + feature_store_with_local_registry.teardown() + + +@pytest.fixture +def feature_store_with_local_registry(): + fd, registry_path = mkstemp() + fd, online_store_path = mkstemp() + return FeatureStore( + config=RepoConfig( + registry=registry_path, + project="default", + provider="local", + online_store=SqliteOnlineStoreConfig(path=online_store_path), + ) + ) diff --git a/sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py b/sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py new file mode 100644 index 0000000000..ca54d882b5 --- /dev/null +++ b/sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py @@ -0,0 +1,189 @@ +import os 
+import tempfile +from datetime import datetime, timedelta + +from feast.aggregation import Aggregation +from feast.data_format import AvroFormat +from feast.data_source import KafkaSource +from feast.driver_test_data import ( + create_driver_hourly_stats_df, + create_global_daily_stats_df, +) +from feast.entity import Entity +from feast.field import Field +from feast.stream_feature_view import stream_feature_view +from feast.types import Float32 +from tests.utils.cli_repo_creator import CliRunner, get_example_repo +from tests.utils.data_source_test_creator import prep_file_source + + +def test_apply_stream_feature_view(simple_dataset_1) -> None: + """ + Test apply of StreamFeatureView. + """ + runner = CliRunner() + with tempfile.TemporaryDirectory() as data_dir: + # Generate test data. + end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") + driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) + + global_df = create_global_daily_stats_df(start_date, end_date) + global_stats_path = os.path.join(data_dir, "global_stats.parquet") + global_df.to_parquet(path=global_stats_path, allow_truncated_timestamps=True) + + with runner.local_repo( + get_example_repo("example_feature_repo_2.py") + .replace("%PARQUET_PATH%", driver_stats_path) + .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), + "file", + ) as fs, prep_file_source( + df=simple_dataset_1, timestamp_field="ts_1" + ) as file_source: + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=file_source, + 
watermark_delay_threshold=timedelta(days=1), + ) + + @stream_feature_view( + entities=[entity], + ttl=timedelta(days=30), + owner="test@example.com", + online=True, + schema=[Field(name="dummy_field", dtype=Float32)], + description="desc", + aggregations=[ + Aggregation( + column="dummy_field", + function="max", + time_window=timedelta(days=1), + ), + Aggregation( + column="dummy_field2", + function="count", + time_window=timedelta(days=24), + ), + ], + timestamp_field="event_timestamp", + mode="spark", + source=stream_source, + tags={}, + ) + def simple_sfv(df): + return df + + fs.apply([entity, simple_sfv]) + + stream_feature_views = fs.list_stream_feature_views() + assert len(stream_feature_views) == 1 + assert stream_feature_views[0] == simple_sfv + + features = fs.get_online_features( + features=["simple_sfv:dummy_field"], + entity_rows=[{"test_key": 1001}], + ).to_dict(include_event_timestamps=True) + + assert "test_key" in features + assert features["test_key"] == [1001] + assert "dummy_field" in features + assert features["dummy_field"] == [None] + + +def test_stream_feature_view_udf(simple_dataset_1) -> None: + """ + Test apply of StreamFeatureView udfs are serialized correctly and usable. + """ + runner = CliRunner() + with tempfile.TemporaryDirectory() as data_dir: + # Generate test data. 
+ end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") + driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) + + global_df = create_global_daily_stats_df(start_date, end_date) + global_stats_path = os.path.join(data_dir, "global_stats.parquet") + global_df.to_parquet(path=global_stats_path, allow_truncated_timestamps=True) + + with runner.local_repo( + get_example_repo("example_feature_repo_2.py") + .replace("%PARQUET_PATH%", driver_stats_path) + .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), + "file", + ) as fs, prep_file_source( + df=simple_dataset_1, timestamp_field="ts_1" + ) as file_source: + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=file_source, + watermark_delay_threshold=timedelta(days=1), + ) + + @stream_feature_view( + entities=[entity], + ttl=timedelta(days=30), + owner="test@example.com", + online=True, + schema=[Field(name="dummy_field", dtype=Float32)], + description="desc", + aggregations=[ + Aggregation( + column="dummy_field", + function="max", + time_window=timedelta(days=1), + ), + Aggregation( + column="dummy_field2", + function="count", + time_window=timedelta(days=24), + ), + ], + timestamp_field="event_timestamp", + mode="spark", + source=stream_source, + tags={}, + ) + def pandas_view(pandas_df): + import pandas as pd + + assert type(pandas_df) == pd.DataFrame + df = pandas_df.transform(lambda x: x + 10, axis=1) + df.insert(2, "C", [20.2, 230.0, 34.0], True) + return df + + import pandas as pd + + fs.apply([entity, pandas_view]) + + 
stream_feature_views = fs.list_stream_feature_views() + assert len(stream_feature_views) == 1 + assert stream_feature_views[0] == pandas_view + + sfv = stream_feature_views[0] + + df = pd.DataFrame({"A": [1, 2, 3], "B": [10, 20, 30]}) + new_df = sfv.udf(df) + expected_df = pd.DataFrame( + {"A": [11, 12, 13], "B": [20, 30, 40], "C": [20.2, 230.0, 34.0]} + ) + assert new_df.equals(expected_df) diff --git a/sdk/python/tests/unit/test_feature_service.py b/sdk/python/tests/unit/test_feature_service.py index fc4fd70bcb..da69809b3e 100644 --- a/sdk/python/tests/unit/test_feature_service.py +++ b/sdk/python/tests/unit/test_feature_service.py @@ -5,6 +5,7 @@ from feast.field import Field from feast.infra.offline_stores.file_source import FileSource from feast.types import Float32 +from tests.utils.test_wrappers import no_warnings def test_feature_service_with_description(): @@ -16,7 +17,6 @@ def test_feature_service_with_description(): def test_feature_service_without_description(): feature_service = FeatureService(name="my-feature-service", features=[]) - # assert feature_service.to_proto().spec.description == "" @@ -75,19 +75,6 @@ def test_feature_view_kw_args_warning(): service = FeatureService(features=[], tags={"tag_1": "tag"}, description="desc") -def no_warnings(func): - def wrapper_no_warnings(*args, **kwargs): - with pytest.warns(None) as warnings: - func(*args, **kwargs) - - if len(warnings) > 0: - raise AssertionError( - "Warnings were raised: " + ", ".join([str(w) for w in warnings]) - ) - - return wrapper_no_warnings - - @no_warnings def test_feature_view_kw_args_normal(): file_source = FileSource(name="my-file-source", path="test.parquet") diff --git a/sdk/python/tests/unit/test_proto_json.py b/sdk/python/tests/unit/test_proto_json.py index 235ebc7f93..b5e01744e4 100644 --- a/sdk/python/tests/unit/test_proto_json.py +++ b/sdk/python/tests/unit/test_proto_json.py @@ -12,11 +12,6 @@ FeatureVector = GetOnlineFeaturesResponse.FeatureVector 
-@pytest.fixture(scope="module") -def proto_json_patch(): - proto_json.patch() - - def test_feature_vector_values(proto_json_patch): # FeatureVector contains "repeated values" proto field. # We want to test that feast.types.Value can take different types in JSON @@ -106,3 +101,8 @@ def test_feature_list(proto_json_patch): assertpy.assert_that(feature_list_json).is_equal_to( ["feature-a", "feature-b", "feature-c"] ) + + +@pytest.fixture(scope="module") +def proto_json_patch(): + proto_json.patch() diff --git a/sdk/python/tests/utils/online_read_write_test.py b/sdk/python/tests/utils/basic_read_write_test.py similarity index 100% rename from sdk/python/tests/utils/online_read_write_test.py rename to sdk/python/tests/utils/basic_read_write_test.py diff --git a/sdk/python/tests/utils/cli_utils.py b/sdk/python/tests/utils/cli_repo_creator.py similarity index 100% rename from sdk/python/tests/utils/cli_utils.py rename to sdk/python/tests/utils/cli_repo_creator.py diff --git a/sdk/python/tests/utils/data_source_utils.py b/sdk/python/tests/utils/data_source_test_creator.py similarity index 100% rename from sdk/python/tests/utils/data_source_utils.py rename to sdk/python/tests/utils/data_source_test_creator.py diff --git a/sdk/python/tests/utils/online_store_utils.py b/sdk/python/tests/utils/dynamo_table_creator.py similarity index 89% rename from sdk/python/tests/utils/online_store_utils.py rename to sdk/python/tests/utils/dynamo_table_creator.py index 9cd7663869..20bac122b3 100644 --- a/sdk/python/tests/utils/online_store_utils.py +++ b/sdk/python/tests/utils/dynamo_table_creator.py @@ -8,7 +8,7 @@ from feast.protos.feast.types.Value_pb2 import Value as ValueProto -def _create_n_customer_test_samples(n=10): +def create_n_customer_test_samples(n=10): return [ ( EntityKeyProto( @@ -26,7 +26,7 @@ def _create_n_customer_test_samples(n=10): ] -def _create_test_table(project, tbl_name, region): +def create_test_table(project, tbl_name, region): client = boto3.client("dynamodb", 
region_name=region) client.create_table( TableName=f"{project}.{tbl_name}", @@ -36,12 +36,12 @@ def _create_test_table(project, tbl_name, region): ) -def _delete_test_table(project, tbl_name, region): +def delete_test_table(project, tbl_name, region): client = boto3.client("dynamodb", region_name=region) client.delete_table(TableName=f"{project}.{tbl_name}") -def _insert_data_test_table(data, project, tbl_name, region): +def insert_data_test_table(data, project, tbl_name, region): dynamodb_resource = boto3.resource("dynamodb", region_name=region) table_instance = dynamodb_resource.Table(f"{project}.{tbl_name}") for entity_key, features, timestamp, created_ts in data: diff --git a/sdk/python/tests/utils/e2e_test_validation.py b/sdk/python/tests/utils/e2e_test_validation.py new file mode 100644 index 0000000000..b2eb78f3c8 --- /dev/null +++ b/sdk/python/tests/utils/e2e_test_validation.py @@ -0,0 +1,277 @@ +import math +import os +import time +from datetime import datetime, timedelta +from pathlib import Path +from typing import List, Optional + +import pandas as pd +import pytest +import yaml +from pytz import utc + +from feast import FeatureStore, FeatureView, FileSource, RepoConfig +from feast.data_format import ParquetFormat +from feast.entity import Entity +from feast.field import Field +from feast.registry import Registry +from feast.types import Array, Bytes, Int64, String +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.bigquery import ( + BigQueryDataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.file import ( + FileDataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.redshift import ( + RedshiftDataSourceCreator, +) + + +def validate_offline_online_store_consistency( + fs: 
FeatureStore, fv: FeatureView, split_dt: datetime +) -> None: + now = datetime.utcnow() + + full_feature_names = True + check_offline_store: bool = True + + # Run materialize() + # use both tz-naive & tz-aware timestamps to test that they're both correctly handled + start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + end_date = split_dt + fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) + + time.sleep(10) + + # check result of materialize() + _check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=1, + event_timestamp=end_date, + expected_value=0.3, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + _check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=2, + event_timestamp=end_date, + expected_value=None, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + # check prior value for materialize_incremental() + _check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=3, + event_timestamp=end_date, + expected_value=4, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + # run materialize_incremental() + fs.materialize_incremental(feature_views=[fv.name], end_date=now) + + # check result of materialize_incremental() + _check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=3, + event_timestamp=now, + expected_value=5, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + +def _check_offline_and_online_features( + fs: FeatureStore, + fv: FeatureView, + driver_id: int, + event_timestamp: datetime, + expected_value: Optional[float], + full_feature_names: bool, + check_offline_store: bool = True, +) -> None: + # Check online store + response_dict = fs.get_online_features( + [f"{fv.name}:value"], + [{"driver_id": driver_id}], + full_feature_names=full_feature_names, + ).to_dict() + + if full_feature_names: + + if expected_value: + assert 
response_dict[f"{fv.name}__value"][0], f"Response: {response_dict}" + assert ( + abs(response_dict[f"{fv.name}__value"][0] - expected_value) < 1e-6 + ), f"Response: {response_dict}, Expected: {expected_value}" + else: + assert response_dict[f"{fv.name}__value"][0] is None + else: + if expected_value: + assert response_dict["value"][0], f"Response: {response_dict}" + assert ( + abs(response_dict["value"][0] - expected_value) < 1e-6 + ), f"Response: {response_dict}, Expected: {expected_value}" + else: + assert response_dict["value"][0] is None + + # Check offline store + if check_offline_store: + df = fs.get_historical_features( + entity_df=pd.DataFrame.from_dict( + {"driver_id": [driver_id], "event_timestamp": [event_timestamp]} + ), + features=[f"{fv.name}:value"], + full_feature_names=full_feature_names, + ).to_df() + + if full_feature_names: + if expected_value: + assert ( + abs( + df.to_dict(orient="list")[f"{fv.name}__value"][0] + - expected_value + ) + < 1e-6 + ) + else: + assert not df.to_dict(orient="list")[f"{fv.name}__value"] or math.isnan( + df.to_dict(orient="list")[f"{fv.name}__value"][0] + ) + else: + if expected_value: + assert ( + abs(df.to_dict(orient="list")["value"][0] - expected_value) < 1e-6 + ) + else: + assert not df.to_dict(orient="list")["value"] or math.isnan( + df.to_dict(orient="list")["value"][0] + ) + + +def make_feature_store_yaml(project, test_repo_config, repo_dir_name: Path): + offline_creator: DataSourceCreator = test_repo_config.offline_store_creator(project) + + offline_store_config = offline_creator.create_offline_store_config() + online_store = test_repo_config.online_store + + config = RepoConfig( + registry=str(Path(repo_dir_name) / "registry.db"), + project=project, + provider=test_repo_config.provider, + offline_store=offline_store_config, + online_store=online_store, + repo_path=str(Path(repo_dir_name)), + ) + config_dict = config.dict() + if ( + isinstance(config_dict["online_store"], dict) + and "redis_type" in 
config_dict["online_store"] + ): + if str(config_dict["online_store"]["redis_type"]) == "RedisType.redis_cluster": + config_dict["online_store"]["redis_type"] = "redis_cluster" + elif str(config_dict["online_store"]["redis_type"]) == "RedisType.redis": + config_dict["online_store"]["redis_type"] = "redis" + config_dict["repo_path"] = str(config_dict["repo_path"]) + return yaml.safe_dump(config_dict) + + +NULLABLE_ONLINE_STORE_CONFIGS: List[IntegrationTestRepoConfig] = [ + IntegrationTestRepoConfig( + provider="local", + offline_store_creator=FileDataSourceCreator, + online_store=None, + ), +] + +if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": + NULLABLE_ONLINE_STORE_CONFIGS.extend( + [ + IntegrationTestRepoConfig( + provider="gcp", + offline_store_creator=BigQueryDataSourceCreator, + online_store=None, + ), + IntegrationTestRepoConfig( + provider="aws", + offline_store_creator=RedshiftDataSourceCreator, + online_store=None, + ), + ] + ) + + +def validate_registry_data_source_apply(test_registry: Registry): + # Create Feature Views + batch_source = FileSource( + name="test_source", + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) + + fv1 = FeatureView( + name="my_feature_view_1", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + ], + entities=[entity], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + project = "project" + + # Register data source and feature view + test_registry.apply_data_source(batch_source, project, commit=False) + test_registry.apply_feature_view(fv1, project, commit=True) + + registry_feature_views = test_registry.list_feature_views(project) + registry_data_sources = 
test_registry.list_data_sources(project) + assert len(registry_feature_views) == 1 + assert len(registry_data_sources) == 1 + registry_feature_view = registry_feature_views[0] + assert registry_feature_view.batch_source == batch_source + registry_data_source = registry_data_sources[0] + assert registry_data_source == batch_source + + # Check that change to batch source propagates + batch_source.timestamp_field = "new_ts_col" + test_registry.apply_data_source(batch_source, project, commit=False) + test_registry.apply_feature_view(fv1, project, commit=True) + registry_feature_views = test_registry.list_feature_views(project) + registry_data_sources = test_registry.list_data_sources(project) + assert len(registry_feature_views) == 1 + assert len(registry_data_sources) == 1 + registry_feature_view = registry_feature_views[0] + assert registry_feature_view.batch_source == batch_source + registry_batch_source = test_registry.list_data_sources(project)[0] + assert registry_batch_source == batch_source + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto(project=project) diff --git a/sdk/python/tests/utils/feature_records.py b/sdk/python/tests/utils/feature_records.py new file mode 100644 index 0000000000..acc08ec121 --- /dev/null +++ b/sdk/python/tests/utils/feature_records.py @@ -0,0 +1,496 @@ +from datetime import datetime, timedelta +from typing import Any, Dict, List, Optional + +import pandas as pd +import pytest +from pandas.testing import assert_frame_equal as pd_assert_frame_equal +from pytz import utc + +from feast import FeatureStore, utils +from feast.errors import FeatureNameCollisionError +from feast.feature_view import FeatureView + + +def convert_timestamp_records_to_utc( + records: List[Dict[str, Any]], column: str +) -> List[Dict[str, Any]]: + for record in records: + record[column] = 
utils.make_tzaware(record[column]).astimezone(utc) + return records + + +# Find the latest record in the given time range and filter +def find_latest_record( + records: List[Dict[str, Any]], + ts_key: str, + ts_start: datetime, + ts_end: datetime, + filter_keys: Optional[List[str]] = None, + filter_values: Optional[List[Any]] = None, +) -> Dict[str, Any]: + filter_keys = filter_keys or [] + filter_values = filter_values or [] + assert len(filter_keys) == len(filter_values) + found_record: Dict[str, Any] = {} + for record in records: + if ( + all( + [ + record[filter_key] == filter_value + for filter_key, filter_value in zip(filter_keys, filter_values) + ] + ) + and ts_start <= record[ts_key] <= ts_end + ): + if not found_record or found_record[ts_key] < record[ts_key]: + found_record = record + return found_record + + +def get_expected_training_df( + customer_df: pd.DataFrame, + customer_fv: FeatureView, + driver_df: pd.DataFrame, + driver_fv: FeatureView, + orders_df: pd.DataFrame, + order_fv: FeatureView, + location_df: pd.DataFrame, + location_fv: FeatureView, + global_df: pd.DataFrame, + global_fv: FeatureView, + field_mapping_df: pd.DataFrame, + field_mapping_fv: FeatureView, + entity_df: pd.DataFrame, + event_timestamp: str, + full_feature_names: bool = False, +): + # Convert all pandas dataframes into records with UTC timestamps + customer_records = convert_timestamp_records_to_utc( + customer_df.to_dict("records"), customer_fv.batch_source.timestamp_field + ) + driver_records = convert_timestamp_records_to_utc( + driver_df.to_dict("records"), driver_fv.batch_source.timestamp_field + ) + order_records = convert_timestamp_records_to_utc( + orders_df.to_dict("records"), event_timestamp + ) + location_records = convert_timestamp_records_to_utc( + location_df.to_dict("records"), location_fv.batch_source.timestamp_field + ) + global_records = convert_timestamp_records_to_utc( + global_df.to_dict("records"), global_fv.batch_source.timestamp_field + ) + 
field_mapping_records = convert_timestamp_records_to_utc( + field_mapping_df.to_dict("records"), + field_mapping_fv.batch_source.timestamp_field, + ) + entity_rows = convert_timestamp_records_to_utc( + entity_df.to_dict("records"), event_timestamp + ) + + # Set sufficiently large ttl that it effectively functions as infinite for the calculations below. + default_ttl = timedelta(weeks=52) + + # Manually do point-in-time join of driver, customer, and order records against + # the entity df + for entity_row in entity_rows: + customer_record = find_latest_record( + customer_records, + ts_key=customer_fv.batch_source.timestamp_field, + ts_start=entity_row[event_timestamp] + - _get_feature_view_ttl(customer_fv, default_ttl), + ts_end=entity_row[event_timestamp], + filter_keys=["customer_id"], + filter_values=[entity_row["customer_id"]], + ) + driver_record = find_latest_record( + driver_records, + ts_key=driver_fv.batch_source.timestamp_field, + ts_start=entity_row[event_timestamp] + - _get_feature_view_ttl(driver_fv, default_ttl), + ts_end=entity_row[event_timestamp], + filter_keys=["driver_id"], + filter_values=[entity_row["driver_id"]], + ) + order_record = find_latest_record( + order_records, + ts_key=customer_fv.batch_source.timestamp_field, + ts_start=entity_row[event_timestamp] + - _get_feature_view_ttl(order_fv, default_ttl), + ts_end=entity_row[event_timestamp], + filter_keys=["customer_id", "driver_id"], + filter_values=[entity_row["customer_id"], entity_row["driver_id"]], + ) + origin_record = find_latest_record( + location_records, + ts_key=location_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] + - _get_feature_view_ttl(location_fv, default_ttl), + ts_end=order_record[event_timestamp], + filter_keys=["location_id"], + filter_values=[order_record["origin_id"]], + ) + destination_record = find_latest_record( + location_records, + ts_key=location_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] + - 
_get_feature_view_ttl(location_fv, default_ttl), + ts_end=order_record[event_timestamp], + filter_keys=["location_id"], + filter_values=[order_record["destination_id"]], + ) + global_record = find_latest_record( + global_records, + ts_key=global_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] + - _get_feature_view_ttl(global_fv, default_ttl), + ts_end=order_record[event_timestamp], + ) + + field_mapping_record = find_latest_record( + field_mapping_records, + ts_key=field_mapping_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] + - _get_feature_view_ttl(field_mapping_fv, default_ttl), + ts_end=order_record[event_timestamp], + ) + + entity_row.update( + { + ( + f"customer_profile__{k}" if full_feature_names else k + ): customer_record.get(k, None) + for k in ( + "current_balance", + "avg_passenger_count", + "lifetime_trip_count", + ) + } + ) + entity_row.update( + { + (f"driver_stats__{k}" if full_feature_names else k): driver_record.get( + k, None + ) + for k in ("conv_rate", "avg_daily_trips") + } + ) + entity_row.update( + { + (f"order__{k}" if full_feature_names else k): order_record.get(k, None) + for k in ("order_is_success",) + } + ) + entity_row.update( + { + "origin__temperature": origin_record.get("temperature", None), + "destination__temperature": destination_record.get("temperature", None), + } + ) + entity_row.update( + { + (f"global_stats__{k}" if full_feature_names else k): global_record.get( + k, None + ) + for k in ( + "num_rides", + "avg_ride_length", + ) + } + ) + + # get field_mapping_record by column name, but label by feature name + entity_row.update( + { + ( + f"field_mapping__{feature}" if full_feature_names else feature + ): field_mapping_record.get(column, None) + for ( + column, + feature, + ) in field_mapping_fv.batch_source.field_mapping.items() + } + ) + + # Convert records back to pandas dataframe + expected_df = pd.DataFrame(entity_rows) + + # Move "event_timestamp" column to front + 
current_cols = expected_df.columns.tolist() + current_cols.remove(event_timestamp) + expected_df = expected_df[[event_timestamp] + current_cols] + + # Cast some columns to expected types, since we lose information when converting pandas DFs into Python objects. + if full_feature_names: + expected_column_types = { + "order__order_is_success": "int32", + "driver_stats__conv_rate": "float32", + "customer_profile__current_balance": "float32", + "customer_profile__avg_passenger_count": "float32", + "global_stats__avg_ride_length": "float32", + "field_mapping__feature_name": "int32", + } + else: + expected_column_types = { + "order_is_success": "int32", + "conv_rate": "float32", + "current_balance": "float32", + "avg_passenger_count": "float32", + "avg_ride_length": "float32", + "feature_name": "int32", + } + + for col, typ in expected_column_types.items(): + expected_df[col] = expected_df[col].astype(typ) + + conv_feature_name = "driver_stats__conv_rate" if full_feature_names else "conv_rate" + conv_plus_feature_name = get_response_feature_name( + "conv_rate_plus_100", full_feature_names + ) + expected_df[conv_plus_feature_name] = expected_df[conv_feature_name] + 100 + expected_df[ + get_response_feature_name("conv_rate_plus_100_rounded", full_feature_names) + ] = ( + expected_df[conv_plus_feature_name] + .astype("float") + .round() + .astype(pd.Int32Dtype()) + ) + if "val_to_add" in expected_df.columns: + expected_df[ + get_response_feature_name("conv_rate_plus_val_to_add", full_feature_names) + ] = (expected_df[conv_feature_name] + expected_df["val_to_add"]) + + return expected_df + + +def get_response_feature_name(feature: str, full_feature_names: bool) -> str: + if feature in {"conv_rate", "avg_daily_trips"} and full_feature_names: + return f"driver_stats__{feature}" + + if ( + feature + in { + "conv_rate_plus_100", + "conv_rate_plus_100_rounded", + "conv_rate_plus_val_to_add", + } + and full_feature_names + ): + return f"conv_rate_plus_100__{feature}" + + return 
feature + + +def assert_feature_service_correctness( + store, feature_service, full_feature_names, entity_df, expected_df, event_timestamp +): + + job_from_df = store.get_historical_features( + entity_df=entity_df, + features=feature_service, + full_feature_names=full_feature_names, + ) + + actual_df_from_df_entities = job_from_df.to_df() + + expected_df = expected_df[ + [ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + get_response_feature_name("conv_rate", full_feature_names), + get_response_feature_name("conv_rate_plus_100", full_feature_names), + "driver_age", + ] + ] + + validate_dataframes( + expected_df, + actual_df_from_df_entities, + keys=[event_timestamp, "order_id", "driver_id", "customer_id"], + ) + + +def assert_feature_service_entity_mapping_correctness( + store, feature_service, full_feature_names, entity_df, expected_df, event_timestamp +): + if full_feature_names: + job_from_df = store.get_historical_features( + entity_df=entity_df, + features=feature_service, + full_feature_names=full_feature_names, + ) + actual_df_from_df_entities = job_from_df.to_df() + + expected_df: pd.DataFrame = ( + expected_df.sort_values( + by=[ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + "origin_id", + "destination_id", + ] + ) + .drop_duplicates() + .reset_index(drop=True) + ) + expected_df = expected_df[ + [ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + "origin_id", + "destination_id", + "origin__temperature", + "destination__temperature", + ] + ] + + validate_dataframes( + expected_df, + actual_df_from_df_entities, + keys=[ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + "origin_id", + "destination_id", + ], + ) + else: + # using 2 of the same FeatureView without full_feature_names=True will result in collision + with pytest.raises(FeatureNameCollisionError): + job_from_df = store.get_historical_features( + entity_df=entity_df, + features=feature_service, + 
full_feature_names=full_feature_names, + ) + + +def validate_dataframes(expected_df, actual_df, keys): + expected_df: pd.DataFrame = ( + expected_df.sort_values(by=keys).drop_duplicates().reset_index(drop=True) + ) + + actual_df = ( + actual_df[expected_df.columns] + .sort_values(by=keys) + .drop_duplicates() + .reset_index(drop=True) + ) + + pd_assert_frame_equal( + expected_df, + actual_df, + check_dtype=False, + ) + + +def _get_feature_view_ttl( + feature_view: FeatureView, default_ttl: timedelta +) -> timedelta: + """Returns the ttl of a feature view if it is non-zero. Otherwise returns the specified default.""" + return feature_view.ttl if feature_view.ttl else default_ttl + + +def validate_online_features( + store: FeatureStore, driver_df: pd.DataFrame, max_date: datetime +): + """Assert that features in online store are up to date with `max_date` date.""" + # Read features back + response = store.get_online_features( + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:avg_daily_trips", + "global_daily_stats:num_rides", + "global_daily_stats:avg_ride_length", + ], + entity_rows=[{"driver_id": 1001}], + full_feature_names=True, + ) + + # Float features should still be floats. + assert ( + response.proto.results[ + list(response.proto.metadata.feature_names.val).index( + "driver_hourly_stats__conv_rate" + ) + ] + .values[0] + .float_val + > 0 + ), response.to_dict() + + result = response.to_dict() + assert len(result) == 5 + assert "driver_hourly_stats__avg_daily_trips" in result + assert "driver_hourly_stats__conv_rate" in result + assert ( + abs( + result["driver_hourly_stats__conv_rate"][0] + - get_last_feature_row(driver_df, 1001, max_date)["conv_rate"] + ) + < 0.01 + ) + assert "global_daily_stats__num_rides" in result + assert "global_daily_stats__avg_ride_length" in result + + # Test the ODFV if it exists. 
+ odfvs = store.list_on_demand_feature_views() + if odfvs and odfvs[0].name == "conv_rate_plus_100": + response = store.get_online_features( + features=[ + "conv_rate_plus_100:conv_rate_plus_100", + "conv_rate_plus_100:conv_rate_plus_val_to_add", + ], + entity_rows=[{"driver_id": 1001, "val_to_add": 100}], + full_feature_names=True, + ) + + # Check that float64 feature is stored correctly in proto format. + assert ( + response.proto.results[ + list(response.proto.metadata.feature_names.val).index( + "conv_rate_plus_100__conv_rate_plus_100" + ) + ] + .values[0] + .double_val + > 0 + ) + + result = response.to_dict() + assert len(result) == 3 + assert "conv_rate_plus_100__conv_rate_plus_100" in result + assert "conv_rate_plus_100__conv_rate_plus_val_to_add" in result + assert ( + abs( + result["conv_rate_plus_100__conv_rate_plus_100"][0] + - (get_last_feature_row(driver_df, 1001, max_date)["conv_rate"] + 100) + ) + < 0.01 + ) + assert ( + abs( + result["conv_rate_plus_100__conv_rate_plus_val_to_add"][0] + - (get_last_feature_row(driver_df, 1001, max_date)["conv_rate"] + 100) + ) + < 0.01 + ) + + +def get_last_feature_row(df: pd.DataFrame, driver_id, max_date: datetime): + """Manually extract last feature value from a dataframe for a given driver_id with up to `max_date` date""" + filtered = df[ + (df["driver_id"] == driver_id) + & (df["event_timestamp"] < max_date.replace(tzinfo=utc)) + ] + max_ts = filtered.loc[filtered["event_timestamp"].idxmax()]["event_timestamp"] + filtered_by_ts = filtered[filtered["event_timestamp"] == max_ts] + return filtered_by_ts.loc[filtered_by_ts["created"].idxmax()] diff --git a/sdk/python/tests/utils/http_server.py b/sdk/python/tests/utils/http_server.py new file mode 100644 index 0000000000..47c6cb8ac1 --- /dev/null +++ b/sdk/python/tests/utils/http_server.py @@ -0,0 +1,13 @@ +import socket +from contextlib import closing + + +def free_port(): + sock = socket.socket() + sock.bind(("", 0)) + return sock.getsockname()[1] + + +def 
check_port_open(host, port) -> bool: + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + return sock.connect_ex((host, port)) == 0 diff --git a/sdk/python/tests/utils/logged_features.py b/sdk/python/tests/utils/test_log_creator.py similarity index 60% rename from sdk/python/tests/utils/logged_features.py rename to sdk/python/tests/utils/test_log_creator.py index dc844a60b4..ec0d92814c 100644 --- a/sdk/python/tests/utils/logged_features.py +++ b/sdk/python/tests/utils/test_log_creator.py @@ -3,11 +3,12 @@ import tempfile import uuid from pathlib import Path -from typing import Iterator, Union +from typing import Iterator, List, Union import numpy as np import pandas as pd import pyarrow +import pytz from feast import FeatureService, FeatureStore, FeatureView from feast.errors import FeatureViewNotFoundException @@ -15,6 +16,63 @@ from feast.protos.feast.serving.ServingService_pb2 import FieldStatus +def get_latest_rows( + df: pd.DataFrame, join_key: str, entity_values: List[str] +) -> pd.DataFrame: + """ + Return latest rows in a dataframe based on join key and entity values. + + Args: + df: Dataframe of features values. + join_key : Join key for the feature values in the dataframe. + entity_values : Entity values for the feature values in the dataframe. + + Returns: + The most recent row in the dataframe. + """ + rows = df[df[join_key].isin(entity_values)] + return rows.loc[rows.groupby(join_key)["event_timestamp"].idxmax()] + + +def generate_expected_logs( + df: pd.DataFrame, + feature_view: FeatureView, + features: List[str], + join_keys: List[str], + timestamp_column: str, +) -> pd.DataFrame: + """ + Given dataframe and feature view, generate the expected logging dataframes that would be otherwise generated by our logging infrastructure. + Args: + df: Dataframe of features values returned in `get_online_features`. + feature_view : The feature view from which the features were retrieved. 
+ features : The list of features defined as part of this base feature view. + join_keys : Join keys for the retrieved features. + timestamp_column : Timestamp column + + Returns: + Returns dataframe containing the expected logs. + """ + logs = pd.DataFrame() + for join_key in join_keys: + logs[join_key] = df[join_key] + + for feature in features: + col = f"{feature_view.name}__{feature}" + logs[col] = df[feature] + logs[f"{col}__timestamp"] = df[timestamp_column] + logs[f"{col}__status"] = FieldStatus.PRESENT + if feature_view.ttl: + logs[f"{col}__status"] = logs[f"{col}__status"].mask( + df[timestamp_column] + < datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + - feature_view.ttl, + FieldStatus.OUTSIDE_MAX_AGE, + ) + + return logs.sort_values(by=join_keys).reset_index(drop=True) + + def prepare_logs( source_df: pd.DataFrame, feature_service: FeatureService, store: FeatureStore ) -> pd.DataFrame: diff --git a/sdk/python/tests/utils/test_wrappers.py b/sdk/python/tests/utils/test_wrappers.py new file mode 100644 index 0000000000..efee675790 --- /dev/null +++ b/sdk/python/tests/utils/test_wrappers.py @@ -0,0 +1,14 @@ +import pytest + + +def no_warnings(func): + def wrapper_no_warnings(*args, **kwargs): + with pytest.warns(None) as warnings: + func(*args, **kwargs) + + if len(warnings) > 0: + raise AssertionError( + "Warnings were raised: " + ", ".join([str(w) for w in warnings]) + ) + + return wrapper_no_warnings From 61a194cc312d6424091164d738dfdf1c1048e7b9 Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Fri, 29 Jul 2022 17:47:58 -0400 Subject: [PATCH 65/73] docs: Update intro documentation page (#2982) * GitBook: [#3] docs: update intro documentation page Signed-off-by: Danny Chiao * update main README blurb Signed-off-by: Danny Chiao * address comments Signed-off-by: Danny Chiao --- README.md | 9 +++++++- docs/README.md | 56 ++++++++++++++++++++++++++++++++------------------ 2 files changed, 44 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md 
index 4616686e46..d367f291fa 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,14 @@ ## Overview -Feast is an open source feature store for machine learning. Feast is the fastest path to productionizing analytic data for model training and online inference. +Feast (**Fea**ture **St**ore) is an open source feature store for machine learning. Feast is the fastest path to manage existing infrastructure to productionize analytic data for model training and online inference. + + +Feast allows ML platform teams to: + +* **Make features consistently available for training and serving** by managing an _offline store_ (to process historical data for scale-out batch scoring or model training), a low-latency _online store_ (to power real-time prediction)_,_ and a battle-tested _feature server_ (for serving pre-computed features online). +* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensure that future feature values do not leak to models during training. +* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another. Please see our [documentation](https://docs.feast.dev/) for more information about the project. diff --git a/docs/README.md b/docs/README.md index f8b9af3c32..1b70f8fedc 100644 --- a/docs/README.md +++ b/docs/README.md @@ -2,43 +2,59 @@ ## What is Feast? -Feast (**Fea**ture **St**ore) is an operational data system for managing and serving machine learning features to models in production. Feast is able to serve feature data to models from a low-latency online store (for real-time prediction) or from an offline store (for scale-out batch scoring or model training). 
+Feast (**Fea**ture **St**ore) is a customizable operational data system that re-uses existing infrastructure to manage and serve machine learning features to realtime models. -![](assets/feast-marchitecture.png) - -## Problems Feast Solves +Feast allows ML platform teams to: -**Models need consistent access to data:** Machine Learning (ML) systems built on traditional data infrastructure are often coupled to databases, object stores, streams, and files. A result of this coupling, however, is that any change in data infrastructure may break dependent ML systems. Another challenge is that dual implementations of data retrieval for training and serving can lead to inconsistencies in data, which in turn can lead to training-serving skew. +* **Make features consistently available for training and serving** by managing an _offline store_ (to process historical data for scale-out batch scoring or model training), a low-latency _online store_ (to power real-time prediction)_,_ and a battle-tested _feature server_ (for serving pre-computed features online). +* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensure that future feature values do not leak to models during training. +* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another. -Feast decouples your models from your data infrastructure by providing a single data access layer that abstracts feature storage from feature retrieval. Feast also provides a consistent means of referencing feature data for retrieval, and therefore ensures that models remain portable when moving from training to serving. 
+{% hint style="info" %} +**Note:** Feast today primarily addresses _timestamped structured data_. +{% endhint %} -**Deploying new features into production is difficult:** Many ML teams consist of members with different objectives. Data scientists, for example, aim to deploy features into production as soon as possible, while engineers want to ensure that production systems remain stable. These differing objectives can create an organizational friction that slows time-to-market for new features. +![](assets/feast-marchitecture.png) -Feast addresses this friction by providing both a centralized registry to which data scientists can publish features and a battle-hardened serving layer. Together, these enable non-engineering teams to ship features into production with minimal oversight. +## Who is Feast for? -**Models need point-in-time correct data:** ML models in production require a view of data consistent with the one on which they are trained, otherwise the accuracy of these models could be compromised. Despite this need, many data science projects suffer from inconsistencies introduced by future feature values being leaked to models during training. +Feast helps ML platform teams with DevOps experience productionize real-time models. Feast can also help these teams build towards a feature platform that improves collaboration between engineers and data scientists. -Feast solves the challenge of data leakage by providing point-in-time correct feature retrieval when exporting feature datasets for model training. + -**Features aren't reused across projects:** Different teams within an organization are often unable to reuse features across projects. The siloed nature of development and the monolithic design of end-to-end ML systems contribute to duplication of feature creation and usage across teams and projects. +Feast is likely **not** the right tool if you -Feast addresses this problem by introducing feature reuse through a centralized registry. 
This registry enables multiple teams working on different projects not only to contribute features, but also to reuse these same features. With Feast, data scientists can start new ML projects by selecting previously engineered features from a centralized registry, and are no longer required to develop new features for each project. +* are in an organization that’s just getting started with ML and is not yet sure what the business impact of ML is +* rely primarily on unstructured data +* need very low latency feature retrieval (e.g. p99 feature retrieval << 10ms) +* have a small team to support a large number of use cases -## Problems Feast does not yet solve +## What Feast is not? -**Feature engineering:** We aim for Feast to support light-weight feature engineering as part of our API. +### Feast is not -**Feature discovery:** We also aim for Feast to include a first-class user interface for exploring and discovering entities and features. +* **an** [**ETL**](https://en.wikipedia.org/wiki/Extract,\_transform,\_load) / [**ELT**](https://en.wikipedia.org/wiki/Extract,\_load,\_transform) **system:** Feast is not (and does not plan to become) a general purpose data transformation or pipelining system. Users often leverage tools like [dbt](https://www.getdbt.com) to manage upstream data transformations. +* **a data orchestration tool:** Feast does not manage or orchestrate complex workflow DAGs. It relies on upstream data pipelines to produce feature values and integrations with tools like [Airflow](https://airflow.apache.org) to make features consistently available. +* **a data warehouse:** Feast is not a replacement for your data warehouse or the source of truth for all transformed data in your organization. Rather, Feast is a light-weight downstream layer that can serve data from an existing data warehouse (or other data sources) to models in production. +* **a database:** Feast is not a database, but helps manage data stored in other systems (e.g. 
BigQuery, Snowflake, DynamoDB, Redis) to make features consistently available at training / serving time -**Feature validation:** We additionally aim for Feast to improve support for statistics generation of feature data and subsequent validation of these statistics. Current support is limited. +### Feast does not _fully_ solve -## What Feast is not +* **reproducible model training / model backtesting / experiment management**: Feast captures feature and model metadata, but does not version-control datasets / labels or manage train / test splits. Other tools like [DVC](https://dvc.org/), [MLflow](https://www.mlflow.org/), and [Kubeflow](https://www.kubeflow.org/) are better suited for this. +* **batch + streaming feature engineering**: Feast primarily processes already transformed feature values (though it offers experimental light-weight transformations). Users usually integrate Feast with upstream systems (e.g. existing ETL/ELT pipelines). [Tecton](http://tecton.ai/) is a more fully featured feature platform which addresses these needs. +* **native streaming feature integration:** Feast enables users to push streaming features, but does not pull from streaming sources or manage streaming pipelines. [Tecton](http://tecton.ai/) is a more fully featured feature platform which orchestrates end to end streaming pipelines. +* **feature sharing**: Feast has experimental functionality to enable discovery and cataloguing of feature metadata with a [Feast web UI (alpha)](https://docs.feast.dev/reference/alpha-web-ui). Feast also has community contributed plugins with [DataHub](https://datahubproject.io/docs/generated/ingestion/sources/feast/) and [Amundsen](https://github.com/amundsen-io/amundsen/blob/4a9d60176767c4d68d1cad5b093320ea22e26a49/databuilder/databuilder/extractor/feast\_extractor.py). [Tecton](http://tecton.ai/) also more robustly addresses these needs. 
+* **lineage:** Feast helps tie feature values to model versions, but is not a complete solution for capturing end-to-end lineage from raw data sources to model versions. Feast also has community contributed plugins with [DataHub](https://datahubproject.io/docs/generated/ingestion/sources/feast/) and [Amundsen](https://github.com/amundsen-io/amundsen/blob/4a9d60176767c4d68d1cad5b093320ea22e26a49/databuilder/databuilder/extractor/feast\_extractor.py). [Tecton](http://tecton.ai/) captures more end-to-end lineage by also managing feature transformations. +* **data quality / drift detection**: Feast has experimental integrations with [Great Expectations](https://greatexpectations.io/), but is not purpose built to solve data drift / data quality issues. This requires more sophisticated monitoring across data pipelines, served feature values, labels, and model versions. -[**ETL**](https://en.wikipedia.org/wiki/Extract,\_transform,\_load) **or** [**ELT**](https://en.wikipedia.org/wiki/Extract,\_load,\_transform) **system:** Feast is not (and does not plan to become) a general purpose data transformation or pipelining system. Feast plans to include a light-weight feature engineering toolkit, but we encourage teams to integrate Feast with upstream ETL/ELT systems that are specialized in transformation. +## Example use cases -**Data warehouse:** Feast is not a replacement for your data warehouse or the source of truth for all transformed data in your organization. Rather, Feast is a light-weight downstream layer that can serve data from an existing data warehouse (or other data sources) to models in production. +Many companies have used Feast to power real-world ML use cases such as: -**Data catalog:** Feast is not a general purpose data catalog for your organization. Feast is purely focused on cataloging features for use in ML pipelines or systems, and only to the extent of facilitating the reuse of features. 
+* Personalizing online recommendations by leveraging pre-computed historical user or item features. +* Online fraud detection, using features that compare against (pre-computed) historical transaction patterns +* Churn prediction (an offline model), generating feature values for all users at a fixed cadence in batch +* Credit scoring, using pre-computed historical features to compute probability of default ## How can I get started? From 2ef71fc6b3ec4fca3b543f2f64bed765b09c3af4 Mon Sep 17 00:00:00 2001 From: sfc-gh-madkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Fri, 29 Jul 2022 18:17:59 -0400 Subject: [PATCH 66/73] feat: Add Snowflake online store (again) (#2922) * feat: Add snowflake online store Signed-off-by: Miles Adkins * lint/format Signed-off-by: Miles Adkins * removing missing testing env variables Signed-off-by: Miles Adkins * test offline store first Signed-off-by: Miles Adkins * snowflake online test fixes Signed-off-by: Miles Adkins * format Signed-off-by: Miles Adkins * fix snowflake testing (#2903) Signed-off-by: Miles Adkins * change to transient table Signed-off-by: Miles Adkins * remove extra lines Signed-off-by: Miles Adkins * docs + nits Signed-off-by: Miles Adkins * docs + nits2 Signed-off-by: Miles Adkins * final docs + nits Signed-off-by: Miles Adkins * Update sdk/python/feast/infra/online_stores/snowflake.py Co-authored-by: Achal Shah Signed-off-by: Miles Adkins * Update sdk/python/feast/infra/online_stores/snowflake.py Co-authored-by: Achal Shah Signed-off-by: Miles Adkins * Update sdk/python/feast/infra/online_stores/snowflake.py Co-authored-by: Achal Shah Signed-off-by: Miles Adkins * Update sdk/python/feast/infra/online_stores/snowflake.py Co-authored-by: Achal Shah Signed-off-by: Miles Adkins * fix serialization Signed-off-by: Miles Adkins * fix serialization2 Signed-off-by: Miles Adkins Co-authored-by: Achal Shah --- docs/SUMMARY.md | 1 + docs/reference/offline-stores/snowflake.md | 4 +- 
docs/reference/online-stores/README.md | 2 + docs/reference/online-stores/snowflake.md | 35 +++ docs/roadmap.md | 3 +- docs/tutorials/driver-stats-on-snowflake.md | 10 +- sdk/python/docs/source/conf.py | 2 +- .../docs/source/feast.infra.online_stores.rst | 8 + sdk/python/docs/source/index.rst | 3 + .../feast/infra/online_stores/snowflake.py | 266 ++++++++++++++++++ .../feast/infra/utils/snowflake_utils.py | 185 +++++++++++- .../feast/templates/snowflake/bootstrap.py | 27 +- .../feast/templates/snowflake/driver_repo.py | 12 +- .../templates/snowflake/feature_store.yaml | 9 +- .../feature_repos/repo_configuration.py | 12 + 15 files changed, 542 insertions(+), 37 deletions(-) create mode 100644 docs/reference/online-stores/snowflake.md create mode 100644 sdk/python/feast/infra/online_stores/snowflake.py diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index cf10a528bd..88691d82f9 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -79,6 +79,7 @@ * [PostgreSQL (contrib)](reference/offline-stores/postgres.md) * [Online stores](reference/online-stores/README.md) * [SQLite](reference/online-stores/sqlite.md) + * [Snowflake](reference/online-stores/snowflake.md) * [Redis](reference/online-stores/redis.md) * [Datastore](reference/online-stores/datastore.md) * [DynamoDB](reference/online-stores/dynamodb.md) diff --git a/docs/reference/offline-stores/snowflake.md b/docs/reference/offline-stores/snowflake.md index aa006b43bb..e2afaef90d 100644 --- a/docs/reference/offline-stores/snowflake.md +++ b/docs/reference/offline-stores/snowflake.md @@ -2,7 +2,7 @@ ## Description -The Snowflake offline store provides support for reading [SnowflakeSources](../data-sources/snowflake.md). +The [Snowflake](https://trial.snowflake.com) offline store provides support for reading [SnowflakeSources](../data-sources/snowflake.md). * Snowflake tables and views are allowed as sources. * All joins happen within Snowflake. 
@@ -11,7 +11,7 @@ The Snowflake offline store provides support for reading [SnowflakeSources](../d * This allows you to call * `to_snowflake` to save the dataset into Snowflake * `to_sql` to get the SQL query that would execute on `to_df` - * `to_arrow_chunks` to get the result in batches ([Snowflake python connector docs](https://docs.snowflake.com/en/user-guide/python-connector-api.html#get_result_batches)) + * `to_arrow_chunks` to get the result in batches ([Snowflake python connector docs](https://docs.snowflake.com/en/user-guide/python-connector-api.html#get_result_batches)) ## Example diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md index b3578b8539..5eb566af3c 100644 --- a/docs/reference/online-stores/README.md +++ b/docs/reference/online-stores/README.md @@ -4,6 +4,8 @@ Please see [Online Store](../../getting-started/architecture-and-components/onli {% page-ref page="sqlite.md" %} +{% page-ref page="snowflake.md" %} + {% page-ref page="redis.md" %} {% page-ref page="datastore.md" %} diff --git a/docs/reference/online-stores/snowflake.md b/docs/reference/online-stores/snowflake.md new file mode 100644 index 0000000000..ccf3d526da --- /dev/null +++ b/docs/reference/online-stores/snowflake.md @@ -0,0 +1,35 @@ +# Snowflake + +## Description + +The [Snowflake](https://trial.snowflake.com) online store provides support for materializing feature values into a Snowflake Transient Table for serving online features. 
+ +* Only the latest feature values are persisted + +The data model for using a Snowflake Transient Table as an online store follows a tall format (one row per feature)): +* "entity_feature_key" (BINARY) -- unique key used when reading specific feature_view x entity combination +* "entity_key" (BINARY) -- repeated key currently unused for reading entity_combination +* "feature_name" (VARCHAR) +* "value" (BINARY) +* "event_ts" (TIMESTAMP) +* "created_ts" (TIMESTAMP) + + (This model may be subject to change when Snowflake Hybrid Tables are released) + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: snowflake.online + account: SNOWFLAKE_DEPLOYMENT_URL + user: SNOWFLAKE_USER + password: SNOWFLAKE_PASSWORD + role: SNOWFLAKE_ROLE + warehouse: SNOWFLAKE_WAREHOUSE + database: SNOWFLAKE_DATABASE +``` +{% endcode %} diff --git a/docs/roadmap.md b/docs/roadmap.md index efe2164b9c..e481453dff 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -27,6 +27,7 @@ The list below contains the functionality that contributors are planning to deve * [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) * [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) * **Online Stores** + * [x] [Snowflake](https://docs.feast.dev/reference/online-stores/snowflake) * [x] [DynamoDB](https://docs.feast.dev/reference/online-stores/dynamodb) * [x] [Redis](https://docs.feast.dev/reference/online-stores/redis) * [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) @@ -59,4 +60,4 @@ The list below contains the functionality that contributors are planning to deve * [x] Model-centric feature tracking (feature services) * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) * [x] DataHub integration (see 
[DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) - * [x] Feast Web UI (Alpha release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) \ No newline at end of file + * [x] Feast Web UI (Alpha release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) diff --git a/docs/tutorials/driver-stats-on-snowflake.md b/docs/tutorials/driver-stats-on-snowflake.md index 01b158cb1a..306ae2f59b 100644 --- a/docs/tutorials/driver-stats-on-snowflake.md +++ b/docs/tutorials/driver-stats-on-snowflake.md @@ -1,6 +1,6 @@ --- description: >- - Initial demonstration of Snowflake as an offline store with Feast, using the Snowflake demo template. + Initial demonstration of Snowflake as an offline+online store with Feast, using the Snowflake demo template. --- # Drivers stats on Snowflake @@ -61,6 +61,14 @@ offline_store: role: ROLE_NAME #case sensitive warehouse: WAREHOUSE_NAME #case sensitive database: DATABASE_NAME #case cap sensitive +online_store: + type: snowflake.online + account: SNOWFLAKE_DEPLOYMENT_URL #drop .snowflakecomputing.com + user: USERNAME + password: PASSWORD + role: ROLE_NAME #case sensitive + warehouse: WAREHOUSE_NAME #case sensitive + database: DATABASE_NAME #case cap sensitive ``` {% endcode %} diff --git a/sdk/python/docs/source/conf.py b/sdk/python/docs/source/conf.py index b311a19664..8f873d21b6 100644 --- a/sdk/python/docs/source/conf.py +++ b/sdk/python/docs/source/conf.py @@ -115,7 +115,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = [] +html_static_path = ["_static"] # -- Options for HTMLHelp output ------------------------------------------ diff --git a/sdk/python/docs/source/feast.infra.online_stores.rst b/sdk/python/docs/source/feast.infra.online_stores.rst index 842522c9d7..65758c409c 100644 --- a/sdk/python/docs/source/feast.infra.online_stores.rst +++ b/sdk/python/docs/source/feast.infra.online_stores.rst @@ -52,6 +52,14 @@ feast.infra.online\_stores.redis module :undoc-members: :show-inheritance: +feast.infra.online\_stores.snowflake module +------------------------------------------- + +.. automodule:: feast.infra.online_stores.snowflake + :members: + :undoc-members: + :show-inheritance: + feast.infra.online\_stores.sqlite module ---------------------------------------- diff --git a/sdk/python/docs/source/index.rst b/sdk/python/docs/source/index.rst index bc034c295d..07b9d9a77e 100644 --- a/sdk/python/docs/source/index.rst +++ b/sdk/python/docs/source/index.rst @@ -250,18 +250,21 @@ Sqlite Online Store .. automodule:: feast.infra.online_stores.sqlite :members: + :noindex: Datastore Online Store ---------------------- .. automodule:: feast.infra.online_stores.datastore :members: + :noindex: DynamoDB Online Store --------------------- .. 
automodule:: feast.infra.online_stores.dynamodb :members: + :noindex: Redis Online Store ------------------ diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py new file mode 100644 index 0000000000..e5e7b680be --- /dev/null +++ b/sdk/python/feast/infra/online_stores/snowflake.py @@ -0,0 +1,266 @@ +import itertools +import os +from binascii import hexlify +from datetime import datetime +from pathlib import Path +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple + +import pandas as pd +import pytz +from pydantic import Field +from pydantic.schema import Literal + +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.infra.online_stores.online_store import OnlineStore +from feast.infra.utils.snowflake_utils import get_snowflake_conn, write_pandas_binary +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.usage import log_exceptions_and_usage + + +class SnowflakeOnlineStoreConfig(FeastConfigBaseModel): + """Online store config for Snowflake""" + + type: Literal["snowflake.online"] = "snowflake.online" + """ Online store type selector""" + + config_path: Optional[str] = ( + Path(os.environ["HOME"]) / ".snowsql/config" + ).__str__() + """ Snowflake config path -- absolute path required (Can't use ~)""" + + account: Optional[str] = None + """ Snowflake deployment identifier -- drop .snowflakecomputing.com""" + + user: Optional[str] = None + """ Snowflake user name """ + + password: Optional[str] = None + """ Snowflake password """ + + role: Optional[str] = None + """ Snowflake role name""" + + warehouse: Optional[str] = None + """ Snowflake warehouse name """ + + database: Optional[str] = None + """ Snowflake database 
name """ + + schema_: Optional[str] = Field("PUBLIC", alias="schema") + """ Snowflake schema name """ + + class Config: + allow_population_by_field_name = True + + +class SnowflakeOnlineStore(OnlineStore): + @log_exceptions_and_usage(online_store="snowflake") + def online_write_batch( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]], + ) -> None: + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + dfs = [None] * len(data) + for i, (entity_key, values, timestamp, created_ts) in enumerate(data): + df = pd.DataFrame( + columns=[ + "entity_feature_key", + "entity_key", + "feature_name", + "value", + "event_ts", + "created_ts", + ], + index=range(0, len(values)), + ) + + timestamp = _to_naive_utc(timestamp) + if created_ts is not None: + created_ts = _to_naive_utc(created_ts) + + entity_key_serialization_version = ( + config.entity_key_serialization_version + if config.entity_key_serialization_version + else 2 + ) + for j, (feature_name, val) in enumerate(values.items()): + df.loc[j, "entity_feature_key"] = serialize_entity_key( + entity_key, + entity_key_serialization_version=entity_key_serialization_version, + ) + bytes(feature_name, encoding="utf-8") + df.loc[j, "entity_key"] = serialize_entity_key( + entity_key, + entity_key_serialization_version=entity_key_serialization_version, + ) + df.loc[j, "feature_name"] = feature_name + df.loc[j, "value"] = val.SerializeToString() + df.loc[j, "event_ts"] = timestamp + df.loc[j, "created_ts"] = created_ts + + dfs[i] = df + + if dfs: + agg_df = pd.concat(dfs) + + # This combines both the data upload plus the overwrite in the same transaction + with get_snowflake_conn(config.online_store, autocommit=False) as conn: + write_pandas_binary( + conn, agg_df, f"[online-transient] {config.project}_{table.name}" + ) # special function for writing binary to snowflake + + query 
= f""" + INSERT OVERWRITE INTO "{config.online_store.database}"."{config.online_store.schema_}"."[online-transient] {config.project}_{table.name}" + SELECT + "entity_feature_key", + "entity_key", + "feature_name", + "value", + "event_ts", + "created_ts" + FROM + (SELECT + *, + ROW_NUMBER() OVER(PARTITION BY "entity_key","feature_name" ORDER BY "event_ts" DESC, "created_ts" DESC) AS "_feast_row" + FROM + "{config.online_store.database}"."{config.online_store.schema_}"."[online-transient] {config.project}_{table.name}") + WHERE + "_feast_row" = 1; + """ + + conn.cursor().execute(query) + + if progress: + progress(len(data)) + + return None + + @log_exceptions_and_usage(online_store="snowflake") + def online_read( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: List[str], + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + + entity_fetch_str = ",".join( + [ + ( + "TO_BINARY(" + + hexlify( + serialize_entity_key(combo[0]) + + bytes(combo[1], encoding="utf-8") + ).__str__()[1:] + + ")" + ) + for combo in itertools.product(entity_keys, requested_features) + ] + ) + + with get_snowflake_conn(config.online_store) as conn: + + df = ( + conn.cursor() + .execute( + f""" + SELECT + "entity_key", "feature_name", "value", "event_ts" + FROM + "{config.online_store.database}"."{config.online_store.schema_}"."[online-transient] {config.project}_{table.name}" + WHERE + "entity_feature_key" IN ({entity_fetch_str}) + """, + ) + .fetch_pandas_all() + ) + + entity_key_serialization_version = ( + config.entity_key_serialization_version + if config.entity_key_serialization_version + else 2 + ) + for entity_key in entity_keys: + entity_key_bin = serialize_entity_key( + entity_key, + entity_key_serialization_version=entity_key_serialization_version, + ) + 
res = {} + res_ts = None + for index, row in df[df["entity_key"] == entity_key_bin].iterrows(): + val = ValueProto() + val.ParseFromString(row["value"]) + res[row["feature_name"]] = val + res_ts = row["event_ts"].to_pydatetime() + + if not res: + result.append((None, None)) + else: + result.append((res_ts, res)) + return result + + @log_exceptions_and_usage(online_store="snowflake") + def update( + self, + config: RepoConfig, + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + partial: bool, + ): + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + with get_snowflake_conn(config.online_store) as conn: + + for table in tables_to_keep: + + conn.cursor().execute( + f"""CREATE TRANSIENT TABLE IF NOT EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."[online-transient] {config.project}_{table.name}" ( + "entity_feature_key" BINARY, + "entity_key" BINARY, + "feature_name" VARCHAR, + "value" BINARY, + "event_ts" TIMESTAMP, + "created_ts" TIMESTAMP + )""" + ) + + for table in tables_to_delete: + + conn.cursor().execute( + f'DROP TABLE IF EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."[online-transient] {config.project}_{table.name}"' + ) + + def teardown( + self, + config: RepoConfig, + tables: Sequence[FeatureView], + entities: Sequence[Entity], + ): + assert isinstance(config.online_store, SnowflakeOnlineStoreConfig) + + with get_snowflake_conn(config.online_store) as conn: + + for table in tables: + query = f'DROP TABLE IF EXISTS "{config.online_store.database}"."{config.online_store.schema_}"."[online-transient] {config.project}_{table.name}"' + conn.cursor().execute(query) + + +def _to_naive_utc(ts: datetime): + if ts.tzinfo is None: + return ts + else: + return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/utils/snowflake_utils.py 
b/sdk/python/feast/infra/utils/snowflake_utils.py index 78d505bd08..f54288e45d 100644 --- a/sdk/python/feast/infra/utils/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake_utils.py @@ -44,8 +44,12 @@ def execute_snowflake_statement(conn: SnowflakeConnection, query) -> SnowflakeCu def get_snowflake_conn(config, autocommit=True) -> SnowflakeConnection: - assert config.type == "snowflake.offline" - config_header = "connections.feast_offline_store" + assert config.type in ["snowflake.offline", "snowflake.online"] + + if config.type == "snowflake.offline": + config_header = "connections.feast_offline_store" + elif config.type == "snowflake.online": + config_header = "connections.feast_online_store" config_dict = dict(config) @@ -122,8 +126,8 @@ def write_pandas( conn: Connection to be used to communicate with Snowflake. df: Dataframe we'd like to write back. table_name: Table name where we want to insert into. - database: Database schema and table is in, if not provided the default one will be used (Default value = None). - schema: Schema table is in, if not provided the default one will be used (Default value = None). + database: Database table is in, if not provided the connection one will be used. + schema: Schema table is in, if not provided the connection one will be used. chunk_size: Number of elements to be inserted once, if not provided all elements will be dumped once (Default value = None). compression: The compression used on the Parquet files, can only be gzip, or snappy. 
Gzip gives supposedly a @@ -432,3 +436,176 @@ def parse_private_key_path(key_path: str, private_key_passphrase: str) -> bytes: ) return pkb + + +def write_pandas_binary( + conn: SnowflakeConnection, + df: pd.DataFrame, + table_name: str, + database: Optional[str] = None, + schema: Optional[str] = None, + chunk_size: Optional[int] = None, + compression: str = "gzip", + on_error: str = "abort_statement", + parallel: int = 4, + quote_identifiers: bool = True, + auto_create_table: bool = False, + create_temp_table: bool = False, +): + """Allows users to most efficiently write back a pandas DataFrame to Snowflake. + + It works by dumping the DataFrame into Parquet files, uploading them and finally copying their data into the table. + + Returns whether all files were ingested correctly, number of chunks uploaded, and number of rows ingested + with all of the COPY INTO command's output for debugging purposes. + + Example usage: + import pandas + from snowflake.connector.pandas_tools import write_pandas + + df = pandas.DataFrame([('Mark', 10), ('Luke', 20)], columns=['name', 'balance']) + success, nchunks, nrows, _ = write_pandas(cnx, df, 'customers') + + Args: + conn: Connection to be used to communicate with Snowflake. + df: Dataframe we'd like to write back. + table_name: Table name where we want to insert into. + database: Database table is in, if not provided the connection one will be used. + schema: Schema table is in, if not provided the connection one will be used. + chunk_size: Number of elements to be inserted once, if not provided all elements will be dumped once + (Default value = None). + compression: The compression used on the Parquet files, can only be gzip, or snappy. Gzip gives supposedly a + better compression, while snappy is faster. Use whichever is more appropriate (Default value = 'gzip'). 
+ on_error: Action to take when COPY INTO statements fail, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#copy-options-copyoptions + (Default value = 'abort_statement'). + parallel: Number of threads to be used when uploading chunks, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/put.html#optional-parameters (Default value = 4). + quote_identifiers: By default, identifiers, specifically database, schema, table and column names + (from df.columns) will be quoted. If set to False, identifiers are passed on to Snowflake without quoting. + I.e. identifiers will be coerced to uppercase by Snowflake. (Default value = True) + auto_create_table: When true, will automatically create a table with corresponding columns for each column in + the passed in DataFrame. The table will not be created if it already exists + create_temp_table: Will make the auto-created table as a temporary table + """ + if database is not None and schema is None: + raise ProgrammingError( + "Schema has to be provided to write_pandas when a database is provided" + ) + # This dictionary maps the compression algorithm to Snowflake put copy into command type + # https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#type-parquet + compression_map = {"gzip": "auto", "snappy": "snappy"} + if compression not in compression_map.keys(): + raise ProgrammingError( + "Invalid compression '{}', only acceptable values are: {}".format( + compression, compression_map.keys() + ) + ) + if quote_identifiers: + location = ( + (('"' + database + '".') if database else "") + + (('"' + schema + '".') if schema else "") + + ('"' + table_name + '"') + ) + else: + location = ( + (database + "." if database else "") + + (schema + "." 
if schema else "") + + (table_name) + ) + if chunk_size is None: + chunk_size = len(df) + cursor: SnowflakeCursor = conn.cursor() + stage_name = create_temporary_sfc_stage(cursor) + + with TemporaryDirectory() as tmp_folder: + for i, chunk in chunk_helper(df, chunk_size): + chunk_path = os.path.join(tmp_folder, "file{}.txt".format(i)) + # Dump chunk into parquet file + chunk.to_parquet( + chunk_path, + compression=compression, + use_deprecated_int96_timestamps=True, + ) + # Upload parquet file + upload_sql = ( + "PUT /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" + ).format( + path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"), + stage_name=stage_name, + parallel=parallel, + ) + logger.debug(f"uploading files with '{upload_sql}'") + cursor.execute(upload_sql, _is_internal=True) + # Remove chunk file + os.remove(chunk_path) + if quote_identifiers: + columns = '"' + '","'.join(list(df.columns)) + '"' + else: + columns = ",".join(list(df.columns)) + + if auto_create_table: + file_format_name = create_file_format(compression, compression_map, cursor) + infer_schema_sql = f"SELECT COLUMN_NAME, TYPE FROM table(infer_schema(location=>'@\"{stage_name}\"', file_format=>'{file_format_name}'))" + logger.debug(f"inferring schema with '{infer_schema_sql}'") + result_cursor = cursor.execute(infer_schema_sql, _is_internal=True) + if result_cursor is None: + raise SnowflakeQueryUnknownError(infer_schema_sql) + result = cast(List[Tuple[str, str]], result_cursor.fetchall()) + column_type_mapping: Dict[str, str] = dict(result) + # Infer schema can return the columns out of order depending on the chunking we do when uploading + # so we have to iterate through the dataframe columns to make sure we create the table with its + # columns in order + quote = '"' if quote_identifiers else "" + create_table_columns = ", ".join( + [f"{quote}{c}{quote} {column_type_mapping[c]}" for c in df.columns] + ) + 
create_table_sql = ( + f"CREATE {'TEMP ' if create_temp_table else ''}TABLE IF NOT EXISTS {location} " + f"({create_table_columns})" + f" /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + ) + logger.debug(f"auto creating table with '{create_table_sql}'") + cursor.execute(create_table_sql, _is_internal=True) + drop_file_format_sql = f"DROP FILE FORMAT IF EXISTS {file_format_name}" + logger.debug(f"dropping file format with '{drop_file_format_sql}'") + cursor.execute(drop_file_format_sql, _is_internal=True) + + # in Snowflake, all parquet data is stored in a single column, $1, so we must select columns explicitly + # see (https://docs.snowflake.com/en/user-guide/script-data-load-transform-parquet.html) + if quote_identifiers: + parquet_columns = ",".join( + f'TO_BINARY($1:"{c}")' + if c in ["entity_feature_key", "entity_key", "value"] + else f'$1:"{c}"' + for c in df.columns + ) + else: + parquet_columns = ",".join( + f"TO_BINARY($1:{c})" + if c in ["entity_feature_key", "entity_key", "value"] + else f"$1:{c}" + for c in df.columns + ) + + copy_into_sql = ( + "COPY INTO {location} /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "({columns}) " + 'FROM (SELECT {parquet_columns} FROM @"{stage_name}") ' + "FILE_FORMAT=(TYPE=PARQUET COMPRESSION={compression} BINARY_AS_TEXT = FALSE) " + "PURGE=TRUE ON_ERROR={on_error}" + ).format( + location=location, + columns=columns, + parquet_columns=parquet_columns, + stage_name=stage_name, + compression=compression_map[compression], + on_error=on_error, + ) + logger.debug("copying into with '{}'".format(copy_into_sql)) + # Snowflake returns the original cursor if the query execution succeeded. 
+ result_cursor = cursor.execute(copy_into_sql, _is_internal=True) + if result_cursor is None: + raise SnowflakeQueryUnknownError(copy_into_sql) + result_cursor.close() diff --git a/sdk/python/feast/templates/snowflake/bootstrap.py b/sdk/python/feast/templates/snowflake/bootstrap.py index 194ba08c08..1663a1fb8b 100644 --- a/sdk/python/feast/templates/snowflake/bootstrap.py +++ b/sdk/python/feast/templates/snowflake/bootstrap.py @@ -13,7 +13,6 @@ def bootstrap(): from feast.driver_test_data import create_driver_hourly_stats_df repo_path = pathlib.Path(__file__).parent.absolute() - config_file = repo_path / "feature_store.yaml" project_name = str(repo_path)[str(repo_path).rfind("/") + 1 :] @@ -23,7 +22,6 @@ def bootstrap(): driver_entities = [1001, 1002, 1003, 1004, 1005] driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) - repo_path = pathlib.Path(__file__).parent.absolute() data_path = repo_path / "data" data_path.mkdir(exist_ok=True) driver_stats_path = data_path / "driver_stats.parquet" @@ -38,6 +36,17 @@ def bootstrap(): snowflake_warehouse = click.prompt("Snowflake Warehouse Name (Case Sensitive):") snowflake_database = click.prompt("Snowflake Database Name (Case Sensitive):") + config_file = repo_path / "feature_store.yaml" + for i in range(2): + replace_str_in_file( + config_file, "SNOWFLAKE_DEPLOYMENT_URL", snowflake_deployment_url + ) + replace_str_in_file(config_file, "SNOWFLAKE_USER", snowflake_user) + replace_str_in_file(config_file, "SNOWFLAKE_PASSWORD", snowflake_password) + replace_str_in_file(config_file, "SNOWFLAKE_ROLE", snowflake_role) + replace_str_in_file(config_file, "SNOWFLAKE_WAREHOUSE", snowflake_warehouse) + replace_str_in_file(config_file, "SNOWFLAKE_DATABASE", snowflake_database) + if click.confirm( f'Should I upload example data to Snowflake (overwriting "{project_name}_feast_driver_hourly_stats" table)?', default=True, @@ -66,20 +75,6 @@ def bootstrap(): ) conn.close() - repo_path = 
pathlib.Path(__file__).parent.absolute() - config_file = repo_path / "feature_store.yaml" - driver_file = repo_path / "driver_repo.py" - replace_str_in_file( - config_file, "SNOWFLAKE_DEPLOYMENT_URL", snowflake_deployment_url - ) - replace_str_in_file(config_file, "SNOWFLAKE_USER", snowflake_user) - replace_str_in_file(config_file, "SNOWFLAKE_PASSWORD", snowflake_password) - replace_str_in_file(config_file, "SNOWFLAKE_ROLE", snowflake_role) - replace_str_in_file(config_file, "SNOWFLAKE_WAREHOUSE", snowflake_warehouse) - replace_str_in_file(config_file, "SNOWFLAKE_DATABASE", snowflake_database) - - replace_str_in_file(driver_file, "SNOWFLAKE_WAREHOUSE", snowflake_warehouse) - def replace_str_in_file(file_path, match_str, sub_str): with open(file_path, "r") as f: diff --git a/sdk/python/feast/templates/snowflake/driver_repo.py b/sdk/python/feast/templates/snowflake/driver_repo.py index 297a3f5ef0..54f6b67126 100644 --- a/sdk/python/feast/templates/snowflake/driver_repo.py +++ b/sdk/python/feast/templates/snowflake/driver_repo.py @@ -2,8 +2,7 @@ import yaml -from feast import Entity, FeatureService, FeatureView, Field, SnowflakeSource -from feast.types import Float32, Int64 +from feast import Entity, FeatureService, FeatureView, SnowflakeSource # Define an entity for the driver. Entities can be thought of as primary keys used to # retrieve features. 
Entities are also used to join multiple tables/views during the @@ -25,7 +24,6 @@ # The Snowflake table where features can be found database=yaml.safe_load(open("feature_store.yaml"))["offline_store"]["database"], table=f"{project_name}_feast_driver_hourly_stats", - warehouse="SNOWFLAKE_WAREHOUSE", # The event timestamp is used for point-in-time joins and for ensuring only # features within the TTL are returned timestamp_field="event_timestamp", @@ -51,14 +49,6 @@ # amount of historical scanning required for historical feature values # during retrieval ttl=timedelta(weeks=52), - # The list of features defined below act as a schema to both define features - # for both materialization of features into a store, and are used as references - # during retrieval for building a training dataset or serving features - schema=[ - Field(name="conv_rate", dtype=Float32), - Field(name="acc_rate", dtype=Float32), - Field(name="avg_daily_trips", dtype=Int64), - ], # Batch sources are used to find feature values. 
In the case of this feature # view we will query a source table on Redshift for driver statistics # features diff --git a/sdk/python/feast/templates/snowflake/feature_store.yaml b/sdk/python/feast/templates/snowflake/feature_store.yaml index 948869897b..3e2e3c3cea 100644 --- a/sdk/python/feast/templates/snowflake/feature_store.yaml +++ b/sdk/python/feast/templates/snowflake/feature_store.yaml @@ -9,4 +9,11 @@ offline_store: role: SNOWFLAKE_ROLE warehouse: SNOWFLAKE_WAREHOUSE database: SNOWFLAKE_DATABASE -entity_key_serialization_version: 2 +online_store: + type: snowflake.online + account: SNOWFLAKE_DEPLOYMENT_URL + user: SNOWFLAKE_USER + password: SNOWFLAKE_PASSWORD + role: SNOWFLAKE_ROLE + warehouse: SNOWFLAKE_WAREHOUSE + database: SNOWFLAKE_DATABASE diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index bad2f52906..8f1aa51b19 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -73,6 +73,17 @@ "connection_string": "127.0.0.1:6001,127.0.0.1:6002,127.0.0.1:6003", } +SNOWFLAKE_CONFIG = { + "type": "snowflake.online", + "account": os.environ["SNOWFLAKE_CI_DEPLOYMENT"], + "user": os.environ["SNOWFLAKE_CI_USER"], + "password": os.environ["SNOWFLAKE_CI_PASSWORD"], + "role": os.environ["SNOWFLAKE_CI_ROLE"], + "warehouse": os.environ["SNOWFLAKE_CI_WAREHOUSE"], + "database": "FEAST", + "schema": "ONLINE", +} + OFFLINE_STORE_TO_PROVIDER_CONFIG: Dict[str, DataSourceCreator] = { "file": ("local", FileDataSourceCreator), "bigquery": ("gcp", BigQueryDataSourceCreator), @@ -103,6 +114,7 @@ AVAILABLE_ONLINE_STORES["redis"] = (REDIS_CONFIG, None) AVAILABLE_ONLINE_STORES["dynamodb"] = (DYNAMO_CONFIG, None) AVAILABLE_ONLINE_STORES["datastore"] = ("datastore", None) + AVAILABLE_ONLINE_STORES["snowflake"] = (SNOWFLAKE_CONFIG, None) full_repo_configs_module = 
os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) From 5358f0d65cc993cf5bad3ed342e6dd70adfe51d0 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Mon, 1 Aug 2022 00:31:24 -0600 Subject: [PATCH 67/73] docs: Updated quickstart docs to import FeatureService (#2987) docs: updated quickstart docs to import FeatureService Signed-off-by: Francisco Javier Arceo --- docs/getting-started/quickstart.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md index 972ffa13a9..7bbcb78732 100644 --- a/docs/getting-started/quickstart.md +++ b/docs/getting-started/quickstart.md @@ -82,7 +82,7 @@ online_store: from datetime import timedelta -from feast import Entity, FeatureView, Field, FileSource, ValueType +from feast import Entity, FeatureService, FeatureView, Field, FileSource, ValueType from feast.types import Float32, Int64 # Read data from parquet files. Parquet is convenient for local development mode. For From 651ce341687034ce07ca959f805f3c90dccfd4cc Mon Sep 17 00:00:00 2001 From: sfc-gh-madkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Mon, 1 Aug 2022 12:24:21 -0400 Subject: [PATCH 68/73] fix: Snowflake_online_read fix (#2988) Signed-off-by: Miles Adkins --- .../feast/infra/online_stores/snowflake.py | 19 ++++++++++--------- .../templates/snowflake/feature_store.yaml | 1 + 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py index e5e7b680be..73c68e4bc0 100644 --- a/sdk/python/feast/infra/online_stores/snowflake.py +++ b/sdk/python/feast/infra/online_stores/snowflake.py @@ -96,11 +96,11 @@ def online_write_batch( for j, (feature_name, val) in enumerate(values.items()): df.loc[j, "entity_feature_key"] = serialize_entity_key( entity_key, - entity_key_serialization_version=entity_key_serialization_version, + entity_key_serialization_version, ) + 
bytes(feature_name, encoding="utf-8") df.loc[j, "entity_key"] = serialize_entity_key( entity_key, - entity_key_serialization_version=entity_key_serialization_version, + entity_key_serialization_version, ) df.loc[j, "feature_name"] = feature_name df.loc[j, "value"] = val.SerializeToString() @@ -156,12 +156,18 @@ def online_read( result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + entity_key_serialization_version = ( + config.entity_key_serialization_version + if config.entity_key_serialization_version + else 2 + ) + entity_fetch_str = ",".join( [ ( "TO_BINARY(" + hexlify( - serialize_entity_key(combo[0]) + serialize_entity_key(combo[0], entity_key_serialization_version) + bytes(combo[1], encoding="utf-8") ).__str__()[1:] + ")" @@ -187,15 +193,10 @@ def online_read( .fetch_pandas_all() ) - entity_key_serialization_version = ( - config.entity_key_serialization_version - if config.entity_key_serialization_version - else 2 - ) for entity_key in entity_keys: entity_key_bin = serialize_entity_key( entity_key, - entity_key_serialization_version=entity_key_serialization_version, + entity_key_serialization_version, ) res = {} res_ts = None diff --git a/sdk/python/feast/templates/snowflake/feature_store.yaml b/sdk/python/feast/templates/snowflake/feature_store.yaml index 3e2e3c3cea..39f266f89f 100644 --- a/sdk/python/feast/templates/snowflake/feature_store.yaml +++ b/sdk/python/feast/templates/snowflake/feature_store.yaml @@ -9,6 +9,7 @@ offline_store: role: SNOWFLAKE_ROLE warehouse: SNOWFLAKE_WAREHOUSE database: SNOWFLAKE_DATABASE +entity_key_serialization_version: 2 online_store: type: snowflake.online account: SNOWFLAKE_DEPLOYMENT_URL From c611eb830623ab936c393a9ec85b096d73a52274 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Mon, 1 Aug 2022 10:54:21 -0700 Subject: [PATCH 69/73] chore: Remove gcp requirement for local tests (#2972) * Remove unused objects Signed-off-by: Felix Wang * Switch from bigquery to file sources Signed-off-by: Felix Wang 
* Switch tests using example_feature_repo_1 to use file offline store Signed-off-by: Felix Wang * Disable tests that require gcp Signed-off-by: Felix Wang * Remove duplicate test Signed-off-by: Felix Wang * Remove integration marker Signed-off-by: Felix Wang * Fix snowflake config Signed-off-by: Felix Wang * Fix import Signed-off-by: Felix Wang * Add empty feature repo Signed-off-by: Felix Wang * Fix comments Signed-off-by: Felix Wang * Add new example feature repo Signed-off-by: Felix Wang * Add new feature repo with just feature service Signed-off-by: Felix Wang * Move tests from integration to unit Signed-off-by: Felix Wang --- Makefile | 5 +- .../tests/example_repos/empty_feature_repo.py | 3 + .../example_repos/example_feature_repo_1.py | 28 +- ...ure_repo_with_driver_stats_feature_view.py | 30 ++ ...ample_feature_repo_with_feature_service.py | 36 +++ .../integration/e2e/test_universal_e2e.py | 70 ----- .../feature_repos/repo_configuration.py | 10 +- .../registration/test_universal_cli.py | 2 +- .../unit/cli/test_cli_apply_duplicates.py | 24 +- .../unit/local_feast_tests/test_e2e_local.py | 41 +++ .../test_feature_service_apply.py | 25 ++ .../test_feature_service_read.py | 7 +- .../test_stream_feature_view_apply.py | 280 ++++++++---------- .../online_store/test_online_retrieval.py | 8 +- .../tests/utils/basic_read_write_test.py | 5 +- 15 files changed, 298 insertions(+), 276 deletions(-) create mode 100644 sdk/python/tests/example_repos/empty_feature_repo.py create mode 100644 sdk/python/tests/example_repos/example_feature_repo_with_driver_stats_feature_view.py create mode 100644 sdk/python/tests/example_repos/example_feature_repo_with_feature_service.py create mode 100644 sdk/python/tests/unit/local_feast_tests/test_feature_service_apply.py rename sdk/python/tests/{integration => unit}/online_store/test_online_retrieval.py (98%) diff --git a/Makefile b/Makefile index 288da43fcd..6e12f8252c 100644 --- a/Makefile +++ b/Makefile @@ -78,7 +78,10 @@ 
test-python-integration-local: -k "not test_apply_entity_integration and \ not test_apply_feature_view_integration and \ not test_apply_data_source_integration and \ - not test_lambda_materialization" \ + not test_lambda_materialization and \ + not test_feature_view_inference_success and \ + not test_update_file_data_source_with_inferred_event_timestamp_col and \ + not test_nullable_online_store" \ sdk/python/tests \ ) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!"; diff --git a/sdk/python/tests/example_repos/empty_feature_repo.py b/sdk/python/tests/example_repos/empty_feature_repo.py new file mode 100644 index 0000000000..8353c2a7fd --- /dev/null +++ b/sdk/python/tests/example_repos/empty_feature_repo.py @@ -0,0 +1,3 @@ +# This example feature repo is deliberately left empty. It should be used for tests that do not need +# any feature views or other objects (for example, a test that checks that a feature service can be +# applied and retrieved correctly). 
diff --git a/sdk/python/tests/example_repos/example_feature_repo_1.py b/sdk/python/tests/example_repos/example_feature_repo_1.py index 5abd9fb18a..200065f0b1 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_1.py +++ b/sdk/python/tests/example_repos/example_feature_repo_1.py @@ -1,34 +1,26 @@ from datetime import timedelta -from feast import BigQuerySource, Entity, FeatureService, FeatureView, Field, PushSource +from feast import Entity, FeatureService, FeatureView, Field, FileSource, PushSource from feast.types import Float32, Int64, String -driver_locations_source = BigQuerySource( - table="feast-oss.public.drivers", - timestamp_field="event_timestamp", - created_timestamp_column="created_timestamp", -) - -driver_locations_source_query = BigQuerySource( - query="SELECT * from feast-oss.public.drivers", - timestamp_field="event_timestamp", - created_timestamp_column="created_timestamp", -) +# Note that file source paths are not validated, so there doesn't actually need to be any data +# at the paths for these file sources. Since these paths are effectively fake, this example +# feature repo should not be used for historical retrieval. 
-driver_locations_source_query_2 = BigQuerySource( - query="SELECT lat * 2 FROM feast-oss.public.drivers", +driver_locations_source = FileSource( + path="data/driver_locations.parquet", timestamp_field="event_timestamp", created_timestamp_column="created_timestamp", ) -customer_profile_source = BigQuerySource( +customer_profile_source = FileSource( name="customer_profile_source", - table="feast-oss.public.customers", + path="data/customer_profiles.parquet", timestamp_field="event_timestamp", ) -customer_driver_combined_source = BigQuerySource( - table="feast-oss.public.customer_driver", +customer_driver_combined_source = FileSource( + path="data/customer_driver_combined.parquet", timestamp_field="event_timestamp", ) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_driver_stats_feature_view.py b/sdk/python/tests/example_repos/example_feature_repo_with_driver_stats_feature_view.py new file mode 100644 index 0000000000..b6525abbfc --- /dev/null +++ b/sdk/python/tests/example_repos/example_feature_repo_with_driver_stats_feature_view.py @@ -0,0 +1,30 @@ +from datetime import timedelta + +from feast import Entity, FeatureView, Field, FileSource +from feast.types import Float32, Int32, Int64 + +driver_hourly_stats = FileSource( + path="data/driver_stats.parquet", # Fake path + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +driver = Entity( + name="driver_id", + description="driver id", +) + +driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + Field(name="driver_id", dtype=Int32), + ], + online=True, + source=driver_hourly_stats, + tags={}, +) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_feature_service.py b/sdk/python/tests/example_repos/example_feature_repo_with_feature_service.py new file 
mode 100644 index 0000000000..372bd9afb7 --- /dev/null +++ b/sdk/python/tests/example_repos/example_feature_repo_with_feature_service.py @@ -0,0 +1,36 @@ +from datetime import timedelta + +from feast import Entity, FeatureService, FeatureView, Field, FileSource +from feast.types import Float32, Int64, String + +driver_locations_source = FileSource( + path="data/driver_locations.parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", +) + +driver = Entity( + name="driver", # The name is derived from this argument, not object name. + join_keys=["driver_id"], + description="driver id", +) + +driver_locations = FeatureView( + name="driver_locations", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="lat", dtype=Float32), + Field(name="lon", dtype=String), + Field(name="driver_id", dtype=Int64), + ], + online=True, + batch_source=driver_locations_source, + tags={}, +) + +all_drivers_feature_service = FeatureService( + name="driver_locations_service", + features=[driver_locations], + tags={"release": "production"}, +) diff --git a/sdk/python/tests/integration/e2e/test_universal_e2e.py b/sdk/python/tests/integration/e2e/test_universal_e2e.py index 5dc0c042d9..202ae859ae 100644 --- a/sdk/python/tests/integration/e2e/test_universal_e2e.py +++ b/sdk/python/tests/integration/e2e/test_universal_e2e.py @@ -2,13 +2,8 @@ import pytest -from feast import BigQuerySource, Entity, FeatureView, Field -from feast.feature_service import FeatureService -from feast.types import Float32, String from tests.integration.feature_repos.universal.entities import driver from tests.integration.feature_repos.universal.feature_views import driver_feature_view -from tests.utils.basic_read_write_test import basic_rw_test -from tests.utils.cli_repo_creator import CliRunner, get_example_repo from tests.utils.e2e_test_validation import validate_offline_online_store_consistency @@ -32,68 +27,3 @@ def test_e2e_consistency(environment, 
e2e_data_sources, infer_features): split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) validate_offline_online_store_consistency(fs, fv, split_dt) - - -@pytest.mark.integration -def test_partial() -> None: - """ - Add another table to existing repo using partial apply API. Make sure both the table - applied via CLI apply and the new table are passing RW test. - """ - - runner = CliRunner() - with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" - ) as store: - driver = Entity(name="driver", join_keys=["test"]) - - driver_locations_source = BigQuerySource( - table="feast-oss.public.drivers", - timestamp_field="event_timestamp", - created_timestamp_column="created_timestamp", - ) - - driver_locations_100 = FeatureView( - name="driver_locations_100", - entities=[driver], - ttl=timedelta(days=1), - schema=[ - Field(name="lat", dtype=Float32), - Field(name="lon", dtype=String), - Field(name="name", dtype=String), - Field(name="test", dtype=String), - ], - online=True, - batch_source=driver_locations_source, - tags={}, - ) - - store.apply([driver_locations_100]) - - basic_rw_test(store, view_name="driver_locations") - basic_rw_test(store, view_name="driver_locations_100") - - -@pytest.mark.integration -def test_read_pre_applied() -> None: - """ - Read feature values from the FeatureStore using a FeatureService. 
- """ - runner = CliRunner() - with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" - ) as store: - - assert len(store.list_feature_services()) == 1 - fs = store.get_feature_service("driver_locations_service") - assert len(fs.tags) == 1 - assert fs.tags["release"] == "production" - - fv = store.get_feature_view("driver_locations") - - fs = FeatureService(name="new_feature_service", features=[fv[["lon"]]]) - - store.apply([fs]) - - assert len(store.list_feature_services()) == 2 - store.get_feature_service("new_feature_service") diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 8f1aa51b19..776fff3bb9 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -75,11 +75,11 @@ SNOWFLAKE_CONFIG = { "type": "snowflake.online", - "account": os.environ["SNOWFLAKE_CI_DEPLOYMENT"], - "user": os.environ["SNOWFLAKE_CI_USER"], - "password": os.environ["SNOWFLAKE_CI_PASSWORD"], - "role": os.environ["SNOWFLAKE_CI_ROLE"], - "warehouse": os.environ["SNOWFLAKE_CI_WAREHOUSE"], + "account": os.environ.get("SNOWFLAKE_CI_DEPLOYMENT", ""), + "user": os.environ.get("SNOWFLAKE_CI_USER", ""), + "password": os.environ.get("SNOWFLAKE_CI_PASSWORD", ""), + "role": os.environ.get("SNOWFLAKE_CI_ROLE", ""), + "warehouse": os.environ.get("SNOWFLAKE_CI_WAREHOUSE", ""), "database": "FEAST", "schema": "ONLINE", } diff --git a/sdk/python/tests/integration/registration/test_universal_cli.py b/sdk/python/tests/integration/registration/test_universal_cli.py index 3e77f74edd..1fb82ce59f 100644 --- a/sdk/python/tests/integration/registration/test_universal_cli.py +++ b/sdk/python/tests/integration/registration/test_universal_cli.py @@ -159,7 +159,7 @@ def test_nullable_online_store(test_nullable_online_store) -> None: repo_config.write_text(dedent(feature_store_yaml)) repo_example = 
repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_1.py")) + repo_example.write_text(get_example_repo("empty_feature_repo.py")) result = runner.run(["apply"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(0) finally: diff --git a/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py b/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py index f61a46516e..998662781e 100644 --- a/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py +++ b/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py @@ -49,9 +49,8 @@ def run_simple_apply_test(example_repo_file_name: str, expected_error: bytes): def test_cli_apply_imported_featureview() -> None: """ - Test apply feature views with duplicated names and single py file in a feature repo using CLI + Tests that applying a feature view imported from a separate Python file is successful. """ - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: runner = CliRunner() # Construct an example repo in a temporary dir @@ -72,8 +71,11 @@ def test_cli_apply_imported_featureview() -> None: ) ) + # Import feature view from an existing file so it exists in two files. repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_2.py")) + repo_example.write_text( + get_example_repo("example_feature_repo_with_driver_stats_feature_view.py") + ) repo_example_2 = repo_path / "example_2.py" repo_example_2.write_text( "from example import driver_hourly_stats_view\n" @@ -92,9 +94,9 @@ def test_cli_apply_imported_featureview() -> None: def test_cli_apply_imported_featureview_with_duplication() -> None: """ - Test apply feature views with duplicated names and single py file in a feature repo using CLI + Tests that applying feature views with duplicated names is not possible, even if one of the + duplicated feature views is imported from another file. 
""" - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: runner = CliRunner() # Construct an example repo in a temporary dir @@ -115,8 +117,11 @@ def test_cli_apply_imported_featureview_with_duplication() -> None: ) ) + # Import feature view with duplicated name to try breaking the deduplication logic. repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_2.py")) + repo_example.write_text( + get_example_repo("example_feature_repo_with_driver_stats_feature_view.py") + ) repo_example_2 = repo_path / "example_2.py" repo_example_2.write_text( "from datetime import timedelta\n" @@ -147,7 +152,6 @@ def test_cli_apply_duplicated_featureview_names_multiple_py_files() -> None: """ Test apply feature views with duplicated names from multiple py files in a feature repo using CLI """ - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: runner = CliRunner() # Construct an example repo in a temporary dir @@ -170,7 +174,11 @@ def test_cli_apply_duplicated_featureview_names_multiple_py_files() -> None: # Create multiple py files containing the same feature view name for i in range(3): repo_example = repo_path / f"example{i}.py" - repo_example.write_text(get_example_repo("example_feature_repo_2.py")) + repo_example.write_text( + get_example_repo( + "example_feature_repo_with_driver_stats_feature_view.py" + ) + ) rc, output = runner.run_with_output(["apply"], cwd=repo_path) assert ( diff --git a/sdk/python/tests/unit/local_feast_tests/test_e2e_local.py b/sdk/python/tests/unit/local_feast_tests/test_e2e_local.py index 5fbedf944d..97d6463f5f 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_e2e_local.py +++ b/sdk/python/tests/unit/local_feast_tests/test_e2e_local.py @@ -5,11 +5,14 @@ import pandas as pd +from feast import Entity, FeatureView, Field, FileSource from feast.driver_test_data import ( create_driver_hourly_stats_df, 
create_global_daily_stats_df, ) from feast.feature_store import FeatureStore +from feast.types import Float32, String +from tests.utils.basic_read_write_test import basic_rw_test from tests.utils.cli_repo_creator import CliRunner, get_example_repo from tests.utils.feature_records import validate_online_features @@ -120,3 +123,41 @@ def _test_materialize_and_online_retrieval( assert r.returncode == 0, f"stdout: {r.stdout}\n stderr: {r.stderr}" validate_online_features(store, driver_df, end_date) + + +def test_partial() -> None: + """ + Add another table to existing repo using partial apply API. Make sure both the table + applied via CLI apply and the new table are passing RW test. + """ + runner = CliRunner() + with runner.local_repo( + get_example_repo("example_feature_repo_1.py"), "file" + ) as store: + driver = Entity(name="driver", join_keys=["test"]) + + driver_locations_source = FileSource( + path="data/driver_locations.parquet", # Fake path + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + ) + + driver_locations_100 = FeatureView( + name="driver_locations_100", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="lat", dtype=Float32), + Field(name="lon", dtype=String), + Field(name="name", dtype=String), + Field(name="test", dtype=String), + ], + online=True, + batch_source=driver_locations_source, + tags={}, + ) + + store.apply([driver_locations_100]) + + basic_rw_test(store, view_name="driver_locations") + basic_rw_test(store, view_name="driver_locations_100") diff --git a/sdk/python/tests/unit/local_feast_tests/test_feature_service_apply.py b/sdk/python/tests/unit/local_feast_tests/test_feature_service_apply.py new file mode 100644 index 0000000000..dc642a6e3c --- /dev/null +++ b/sdk/python/tests/unit/local_feast_tests/test_feature_service_apply.py @@ -0,0 +1,25 @@ +from feast.feature_service import FeatureService +from tests.utils.cli_repo_creator import CliRunner, get_example_repo + + +def 
test_read_pre_applied() -> None: + """ + Read feature values from the FeatureStore using a FeatureService. + """ + runner = CliRunner() + with runner.local_repo( + get_example_repo("example_feature_repo_with_feature_service.py"), "file" + ) as store: + assert len(store.list_feature_services()) == 1 + fs = store.get_feature_service("driver_locations_service") + assert len(fs.tags) == 1 + assert fs.tags["release"] == "production" + + fv = store.get_feature_view("driver_locations") + + fs = FeatureService(name="new_feature_service", features=[fv[["lon"]]]) + + store.apply([fs]) + + assert len(store.list_feature_services()) == 2 + store.get_feature_service("new_feature_service") diff --git a/sdk/python/tests/unit/local_feast_tests/test_feature_service_read.py b/sdk/python/tests/unit/local_feast_tests/test_feature_service_read.py index 72392b0396..2b5b311dc9 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_feature_service_read.py +++ b/sdk/python/tests/unit/local_feast_tests/test_feature_service_read.py @@ -1,20 +1,15 @@ -import pytest - from tests.utils.basic_read_write_test import basic_rw_test from tests.utils.cli_repo_creator import CliRunner, get_example_repo -@pytest.mark.integration def test_feature_service_read() -> None: """ Read feature values from the FeatureStore using a FeatureService. 
""" - runner = CliRunner() with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" + get_example_repo("example_feature_repo_with_feature_service.py"), "file" ) as store: - basic_rw_test( store, view_name="driver_locations", diff --git a/sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py b/sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py index ca54d882b5..0def3cc783 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py +++ b/sdk/python/tests/unit/local_feast_tests/test_stream_feature_view_apply.py @@ -1,14 +1,8 @@ -import os -import tempfile -from datetime import datetime, timedelta +from datetime import timedelta from feast.aggregation import Aggregation from feast.data_format import AvroFormat from feast.data_source import KafkaSource -from feast.driver_test_data import ( - create_driver_hourly_stats_df, - create_global_daily_stats_df, -) from feast.entity import Entity from feast.field import Field from feast.stream_feature_view import stream_feature_view @@ -22,82 +16,65 @@ def test_apply_stream_feature_view(simple_dataset_1) -> None: Test apply of StreamFeatureView. """ runner = CliRunner() - with tempfile.TemporaryDirectory() as data_dir: - # Generate test data. 
- end_date = datetime.now().replace(microsecond=0, second=0, minute=0) - start_date = end_date - timedelta(days=15) - - driver_entities = [1001, 1002, 1003, 1004, 1005] - driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) - driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") - driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) - - global_df = create_global_daily_stats_df(start_date, end_date) - global_stats_path = os.path.join(data_dir, "global_stats.parquet") - global_df.to_parquet(path=global_stats_path, allow_truncated_timestamps=True) - - with runner.local_repo( - get_example_repo("example_feature_repo_2.py") - .replace("%PARQUET_PATH%", driver_stats_path) - .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), - "file", - ) as fs, prep_file_source( - df=simple_dataset_1, timestamp_field="ts_1" - ) as file_source: - entity = Entity(name="driver_entity", join_keys=["test_key"]) - - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=file_source, - watermark_delay_threshold=timedelta(days=1), - ) - - @stream_feature_view( - entities=[entity], - ttl=timedelta(days=30), - owner="test@example.com", - online=True, - schema=[Field(name="dummy_field", dtype=Float32)], - description="desc", - aggregations=[ - Aggregation( - column="dummy_field", - function="max", - time_window=timedelta(days=1), - ), - Aggregation( - column="dummy_field2", - function="count", - time_window=timedelta(days=24), - ), - ], - timestamp_field="event_timestamp", - mode="spark", - source=stream_source, - tags={}, - ) - def simple_sfv(df): - return df - - fs.apply([entity, simple_sfv]) - - stream_feature_views = fs.list_stream_feature_views() - assert len(stream_feature_views) == 1 - assert stream_feature_views[0] == simple_sfv - - features = fs.get_online_features( - 
features=["simple_sfv:dummy_field"], - entity_rows=[{"test_key": 1001}], - ).to_dict(include_event_timestamps=True) - - assert "test_key" in features - assert features["test_key"] == [1001] - assert "dummy_field" in features - assert features["dummy_field"] == [None] + with runner.local_repo( + get_example_repo("empty_feature_repo.py"), "file" + ) as fs, prep_file_source( + df=simple_dataset_1, timestamp_field="ts_1" + ) as file_source: + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=file_source, + watermark_delay_threshold=timedelta(days=1), + ) + + @stream_feature_view( + entities=[entity], + ttl=timedelta(days=30), + owner="test@example.com", + online=True, + schema=[Field(name="dummy_field", dtype=Float32)], + description="desc", + aggregations=[ + Aggregation( + column="dummy_field", + function="max", + time_window=timedelta(days=1), + ), + Aggregation( + column="dummy_field2", + function="count", + time_window=timedelta(days=24), + ), + ], + timestamp_field="event_timestamp", + mode="spark", + source=stream_source, + tags={}, + ) + def simple_sfv(df): + return df + + fs.apply([entity, simple_sfv]) + + stream_feature_views = fs.list_stream_feature_views() + assert len(stream_feature_views) == 1 + assert stream_feature_views[0] == simple_sfv + + features = fs.get_online_features( + features=["simple_sfv:dummy_field"], + entity_rows=[{"test_key": 1001}], + ).to_dict(include_event_timestamps=True) + + assert "test_key" in features + assert features["test_key"] == [1001] + assert "dummy_field" in features + assert features["dummy_field"] == [None] def test_stream_feature_view_udf(simple_dataset_1) -> None: @@ -105,85 +82,68 @@ def test_stream_feature_view_udf(simple_dataset_1) -> None: Test apply of StreamFeatureView udfs are serialized correctly and usable. 
""" runner = CliRunner() - with tempfile.TemporaryDirectory() as data_dir: - # Generate test data. - end_date = datetime.now().replace(microsecond=0, second=0, minute=0) - start_date = end_date - timedelta(days=15) - - driver_entities = [1001, 1002, 1003, 1004, 1005] - driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) - driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") - driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) - - global_df = create_global_daily_stats_df(start_date, end_date) - global_stats_path = os.path.join(data_dir, "global_stats.parquet") - global_df.to_parquet(path=global_stats_path, allow_truncated_timestamps=True) - - with runner.local_repo( - get_example_repo("example_feature_repo_2.py") - .replace("%PARQUET_PATH%", driver_stats_path) - .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), - "file", - ) as fs, prep_file_source( - df=simple_dataset_1, timestamp_field="ts_1" - ) as file_source: - entity = Entity(name="driver_entity", join_keys=["test_key"]) - - stream_source = KafkaSource( - name="kafka", - timestamp_field="event_timestamp", - kafka_bootstrap_servers="", - message_format=AvroFormat(""), - topic="topic", - batch_source=file_source, - watermark_delay_threshold=timedelta(days=1), - ) - - @stream_feature_view( - entities=[entity], - ttl=timedelta(days=30), - owner="test@example.com", - online=True, - schema=[Field(name="dummy_field", dtype=Float32)], - description="desc", - aggregations=[ - Aggregation( - column="dummy_field", - function="max", - time_window=timedelta(days=1), - ), - Aggregation( - column="dummy_field2", - function="count", - time_window=timedelta(days=24), - ), - ], - timestamp_field="event_timestamp", - mode="spark", - source=stream_source, - tags={}, - ) - def pandas_view(pandas_df): - import pandas as pd - - assert type(pandas_df) == pd.DataFrame - df = pandas_df.transform(lambda x: x + 10, axis=1) - df.insert(2, "C", [20.2, 230.0, 34.0], 
True) - return df - + with runner.local_repo( + get_example_repo("empty_feature_repo.py"), "file" + ) as fs, prep_file_source( + df=simple_dataset_1, timestamp_field="ts_1" + ) as file_source: + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=file_source, + watermark_delay_threshold=timedelta(days=1), + ) + + @stream_feature_view( + entities=[entity], + ttl=timedelta(days=30), + owner="test@example.com", + online=True, + schema=[Field(name="dummy_field", dtype=Float32)], + description="desc", + aggregations=[ + Aggregation( + column="dummy_field", + function="max", + time_window=timedelta(days=1), + ), + Aggregation( + column="dummy_field2", + function="count", + time_window=timedelta(days=24), + ), + ], + timestamp_field="event_timestamp", + mode="spark", + source=stream_source, + tags={}, + ) + def pandas_view(pandas_df): import pandas as pd - fs.apply([entity, pandas_view]) + assert type(pandas_df) == pd.DataFrame + df = pandas_df.transform(lambda x: x + 10, axis=1) + df.insert(2, "C", [20.2, 230.0, 34.0], True) + return df + + import pandas as pd + + fs.apply([entity, pandas_view]) - stream_feature_views = fs.list_stream_feature_views() - assert len(stream_feature_views) == 1 - assert stream_feature_views[0] == pandas_view + stream_feature_views = fs.list_stream_feature_views() + assert len(stream_feature_views) == 1 + assert stream_feature_views[0] == pandas_view - sfv = stream_feature_views[0] + sfv = stream_feature_views[0] - df = pd.DataFrame({"A": [1, 2, 3], "B": [10, 20, 30]}) - new_df = sfv.udf(df) - expected_df = pd.DataFrame( - {"A": [11, 12, 13], "B": [20, 30, 40], "C": [20.2, 230.0, 34.0]} - ) - assert new_df.equals(expected_df) + df = pd.DataFrame({"A": [1, 2, 3], "B": [10, 20, 30]}) + new_df = sfv.udf(df) + expected_df = pd.DataFrame( + {"A": [11, 12, 13], 
"B": [20, 30, 40], "C": [20.2, 230.0, 34.0]} + ) + assert new_df.equals(expected_df) diff --git a/sdk/python/tests/integration/online_store/test_online_retrieval.py b/sdk/python/tests/unit/online_store/test_online_retrieval.py similarity index 98% rename from sdk/python/tests/integration/online_store/test_online_retrieval.py rename to sdk/python/tests/unit/online_store/test_online_retrieval.py index 988af6e7e9..731230a5f6 100644 --- a/sdk/python/tests/integration/online_store/test_online_retrieval.py +++ b/sdk/python/tests/unit/online_store/test_online_retrieval.py @@ -14,17 +14,15 @@ from tests.utils.cli_repo_creator import CliRunner, get_example_repo -@pytest.mark.integration def test_online() -> None: """ Test reading from the online store in local mode. """ runner = CliRunner() with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" + get_example_repo("example_feature_repo_1.py"), "file" ) as store: # Write some data to two tables - driver_locations_fv = store.get_feature_view(name="driver_locations") customer_profile_fv = store.get_feature_view(name="customer_profile") customer_driver_combined_fv = store.get_feature_view( @@ -251,13 +249,11 @@ def test_online() -> None: os.rename(store.config.registry + "_fake", store.config.registry) -@pytest.mark.integration def test_online_to_df(): """ Test dataframe conversion. Make sure the response columns and rows are the same order as the request. 
""" - driver_ids = [1, 2, 3] customer_ids = [4, 5, 6] name = "foo" @@ -268,7 +264,7 @@ def test_online_to_df(): runner = CliRunner() with runner.local_repo( - get_example_repo("example_feature_repo_1.py"), "bigquery" + get_example_repo("example_feature_repo_1.py"), "file" ) as store: # Write three tables to online store driver_locations_fv = store.get_feature_view(name="driver_locations") diff --git a/sdk/python/tests/utils/basic_read_write_test.py b/sdk/python/tests/utils/basic_read_write_test.py index 39846cd2ad..5a93a05a1f 100644 --- a/sdk/python/tests/utils/basic_read_write_test.py +++ b/sdk/python/tests/utils/basic_read_write_test.py @@ -11,7 +11,10 @@ def basic_rw_test( ) -> None: """ This is a provider-independent test suite for reading and writing from the online store, to - be used by provider-specific tests. + be used by provider-specific tests. + + The specified feature view must have exactly two features: one named 'lat' with type Float32 + and one with name 'lon' with type String. 
""" table = store.get_feature_view(name=view_name) From fc447eb3d0345dba6a45cdf5b1c1c2e982766cb9 Mon Sep 17 00:00:00 2001 From: Kevin Zhang Date: Mon, 1 Aug 2022 15:44:21 -0700 Subject: [PATCH 70/73] fix: Move gcp back to 1.47.0 since grpcio-tools 1.48.0 got yanked from pypi (#2990) Fix Signed-off-by: Kevin Zhang --- .github/workflows/build_wheels.yml | 2 +- Makefile | 2 +- README.md | 11 ++--- .../requirements/py3.10-ci-requirements.txt | 36 ++++++++-------- .../requirements/py3.10-requirements.txt | 10 ++--- .../requirements/py3.8-ci-requirements.txt | 38 ++++++++--------- .../requirements/py3.8-requirements.txt | 12 +++--- .../requirements/py3.9-ci-requirements.txt | 42 +++++++++---------- .../requirements/py3.9-requirements.txt | 10 ++--- 9 files changed, 75 insertions(+), 88 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 096bdb5b81..c47a8ec5c3 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -204,7 +204,7 @@ jobs: env: COMPILE_GO: "True" run: | - pip install 'grpcio-tools==1.48.0' 'pybindgen==0.22.0' + pip install 'grpcio-tools==1.47.0' 'pybindgen==0.22.0' go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0 go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1.0 pip install dist/*tar.gz diff --git a/Makefile b/Makefile index 6e12f8252c..ee2b7c8f1b 100644 --- a/Makefile +++ b/Makefile @@ -199,7 +199,7 @@ install-go-ci-dependencies: python -m pip install pybindgen==0.22.0 protobuf==3.20.1 install-protoc-dependencies: - pip install grpcio-tools==1.48.0 mypy-protobuf==3.1.0 + pip install grpcio-tools==1.47.0 mypy-protobuf==3.1.0 compile-protos-go: install-go-proto-dependencies install-protoc-dependencies python setup.py build_go_protos diff --git a/README.md b/README.md index d367f291fa..df42737811 100644 --- a/README.md +++ b/README.md @@ -18,14 +18,7 @@ ## Overview -Feast (**Fea**ture **St**ore) is an open source feature store for machine 
learning. Feast is the fastest path to manage existing infrastructure to productionize analytic data for model training and online inference. - - -Feast allows ML platform teams to: - -* **Make features consistently available for training and serving** by managing an _offline store_ (to process historical data for scale-out batch scoring or model training), a low-latency _online store_ (to power real-time prediction)_,_ and a battle-tested _feature server_ (for serving pre-computed features online). -* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensure that future feature values do not leak to models during training. -* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another. +Feast is an open source feature store for machine learning. Feast is the fastest path to productionizing analytic data for model training and online inference. Please see our [documentation](https://docs.feast.dev/) for more information about the project. 
@@ -169,6 +162,7 @@ The list below contains the functionality that contributors are planning to deve * [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) * [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) * **Online Stores** + * [x] [Snowflake](https://docs.feast.dev/reference/online-stores/snowflake) * [x] [DynamoDB](https://docs.feast.dev/reference/online-stores/dynamodb) * [x] [Redis](https://docs.feast.dev/reference/online-stores/redis) * [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) @@ -203,6 +197,7 @@ The list below contains the functionality that contributors are planning to deve * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) * [x] Feast Web UI (Alpha release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) + ## 🎓 Important Resources Please refer to the official documentation at [Documentation](https://docs.feast.dev/) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 591787a27b..4ff99c247f 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -174,7 +174,7 @@ executing==0.9.1 # via stack-data fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.3 +fastavro==1.5.4 # via # feast (setup.py) # pandavro @@ -186,7 +186,7 @@ firebase-admin==5.2.0 # via feast (setup.py) fissix==21.11.13 # via bowler -flake8==4.0.1 +flake8==5.0.2 # via feast (setup.py) frozenlist==1.3.0 # via @@ -261,9 +261,7 @@ googleapis-common-protos==1.56.4 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -greenlet==1.1.2 - # via sqlalchemy -grpcio==1.48.0 +grpcio==1.47.0 # via # feast (setup.py) # google-api-core @@ -272,13 +270,13 @@ grpcio==1.48.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.48.0 
+grpcio-reflection==1.47.0 # via feast (setup.py) -grpcio-status==1.48.0 +grpcio-status==1.47.0 # via google-api-core -grpcio-testing==1.48.0 +grpcio-testing==1.47.0 # via feast (setup.py) -grpcio-tools==1.48.0 +grpcio-tools==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -329,7 +327,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.8.0 +jsonschema==4.9.0 # via # altair # feast (setup.py) @@ -346,7 +344,7 @@ markupsafe==2.1.1 # werkzeug matplotlib-inline==0.1.3 # via ipython -mccabe==0.6.1 +mccabe==0.7.0 # via flake8 minio==7.1.0 # via feast (setup.py) @@ -438,7 +436,7 @@ pathspec==0.9.0 # via black pbr==5.9.0 # via mock -pep517==0.12.0 +pep517==0.13.0 # via build pexpect==4.8.0 # via ipython @@ -513,7 +511,7 @@ pyasn1-modules==0.2.8 # via google-auth pybindgen==0.22.1 # via feast (setup.py) -pycodestyle==2.8.0 +pycodestyle==2.9.0 # via flake8 pycparser==2.21 # via cffi @@ -523,7 +521,7 @@ pydantic==1.9.1 # via # fastapi # feast (setup.py) -pyflakes==2.4.0 +pyflakes==2.5.0 # via flake8 pygments==2.12.0 # via @@ -636,7 +634,7 @@ s3fs==2022.1.0 # via feast (setup.py) s3transfer==0.5.2 # via boto3 -scipy==1.8.1 +scipy==1.9.0 # via great-expectations six==1.16.0 # via @@ -735,15 +733,15 @@ types-pytz==2022.1.2 # via feast (setup.py) types-pyyaml==6.0.11 # via feast (setup.py) -types-redis==4.3.11 +types-redis==4.3.13 # via feast (setup.py) -types-requests==2.28.5 +types-requests==2.28.6 # via feast (setup.py) types-setuptools==63.2.2 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.17 +types-urllib3==1.26.20 # via types-requests typing-extensions==4.3.0 # via @@ -793,7 +791,7 @@ wrapt==1.14.1 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.7.2 +yarl==1.8.0 # via aiohttp zipp==3.8.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index f86636e227..8ae219f1fe 100644 --- 
a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -40,7 +40,7 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.3 +fastavro==1.5.4 # via # feast (setup.py) # pandavro @@ -57,13 +57,11 @@ googleapis-common-protos==1.56.4 # feast (setup.py) # google-api-core # tensorflow-metadata -greenlet==1.1.2 - # via sqlalchemy -grpcio==1.48.0 +grpcio==1.47.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.48.0 +grpcio-reflection==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -75,7 +73,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.8.0 +jsonschema==4.9.0 # via feast (setup.py) locket==1.0.0 # via partd diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index a55759ca7d..931a7d1e24 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -178,7 +178,7 @@ executing==0.9.1 # via stack-data fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.3 +fastavro==1.5.4 # via # feast (setup.py) # pandavro @@ -190,7 +190,7 @@ firebase-admin==5.2.0 # via feast (setup.py) fissix==21.11.13 # via bowler -flake8==4.0.1 +flake8==5.0.2 # via feast (setup.py) frozenlist==1.3.0 # via @@ -265,9 +265,7 @@ googleapis-common-protos==1.56.4 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -greenlet==1.1.2 - # via sqlalchemy -grpcio==1.48.0 +grpcio==1.47.0 # via # feast (setup.py) # google-api-core @@ -276,13 +274,13 @@ grpcio==1.48.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.48.0 +grpcio-reflection==1.47.0 # via feast (setup.py) -grpcio-status==1.48.0 +grpcio-status==1.47.0 # via google-api-core -grpcio-testing==1.48.0 +grpcio-testing==1.47.0 # via feast (setup.py) -grpcio-tools==1.48.0 +grpcio-tools==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -335,7 
+333,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.8.0 +jsonschema==4.9.0 # via # altair # feast (setup.py) @@ -352,7 +350,7 @@ markupsafe==2.1.1 # werkzeug matplotlib-inline==0.1.3 # via ipython -mccabe==0.6.1 +mccabe==0.7.0 # via flake8 minio==7.1.0 # via feast (setup.py) @@ -444,7 +442,7 @@ pathspec==0.9.0 # via black pbr==5.9.0 # via mock -pep517==0.12.0 +pep517==0.13.0 # via build pexpect==4.8.0 # via ipython @@ -452,6 +450,8 @@ pickleshare==0.7.5 # via ipython pip-tools==6.8.0 # via feast (setup.py) +pkgutil-resolve-name==1.3.10 + # via jsonschema platformdirs==2.5.2 # via # black @@ -519,7 +519,7 @@ pyasn1-modules==0.2.8 # via google-auth pybindgen==0.22.1 # via feast (setup.py) -pycodestyle==2.8.0 +pycodestyle==2.9.0 # via flake8 pycparser==2.21 # via cffi @@ -529,7 +529,7 @@ pydantic==1.9.1 # via # fastapi # feast (setup.py) -pyflakes==2.4.0 +pyflakes==2.5.0 # via flake8 pygments==2.12.0 # via @@ -644,7 +644,7 @@ s3fs==2022.1.0 # via feast (setup.py) s3transfer==0.5.2 # via boto3 -scipy==1.8.1 +scipy==1.9.0 # via great-expectations six==1.16.0 # via @@ -743,15 +743,15 @@ types-pytz==2022.1.2 # via feast (setup.py) types-pyyaml==6.0.11 # via feast (setup.py) -types-redis==4.3.11 +types-redis==4.3.13 # via feast (setup.py) -types-requests==2.28.5 +types-requests==2.28.6 # via feast (setup.py) types-setuptools==63.2.2 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.17 +types-urllib3==1.26.20 # via types-requests typing-extensions==4.3.0 # via @@ -804,7 +804,7 @@ wrapt==1.14.1 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.7.2 +yarl==1.8.0 # via aiohttp zipp==3.8.1 # via diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index e75d23fe95..362780d69e 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -40,7 +40,7 @@ dill==0.3.5.1 # via feast 
(setup.py) fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.3 +fastavro==1.5.4 # via # feast (setup.py) # pandavro @@ -57,13 +57,11 @@ googleapis-common-protos==1.56.4 # feast (setup.py) # google-api-core # tensorflow-metadata -greenlet==1.1.2 - # via sqlalchemy -grpcio==1.48.0 +grpcio==1.47.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.48.0 +grpcio-reflection==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -77,7 +75,7 @@ importlib-resources==5.9.0 # via jsonschema jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.8.0 +jsonschema==4.9.0 # via feast (setup.py) locket==1.0.0 # via partd @@ -107,6 +105,8 @@ pandavro==1.5.2 # via feast (setup.py) partd==1.2.0 # via dask +pkgutil-resolve-name==1.3.10 + # via jsonschema proto-plus==1.20.6 # via feast (setup.py) protobuf==3.20.1 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 2b43bde544..5d118a3ae2 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -174,7 +174,7 @@ executing==0.9.1 # via stack-data fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.3 +fastavro==1.5.4 # via # feast (setup.py) # pandavro @@ -186,7 +186,7 @@ firebase-admin==5.2.0 # via feast (setup.py) fissix==21.11.13 # via bowler -flake8==4.0.1 +flake8==5.0.2 # via feast (setup.py) frozenlist==1.3.0 # via @@ -261,9 +261,7 @@ googleapis-common-protos==1.56.4 # tensorflow-metadata great-expectations==0.14.13 # via feast (setup.py) -greenlet==1.1.2 - # via sqlalchemy -grpcio==1.48.0 +grpcio==1.47.0 # via # feast (setup.py) # google-api-core @@ -272,13 +270,13 @@ grpcio==1.48.0 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.48.0 +grpcio-reflection==1.47.0 # via feast (setup.py) -grpcio-status==1.48.0 +grpcio-status==1.47.0 # via google-api-core -grpcio-testing==1.48.0 +grpcio-testing==1.47.0 # via feast (setup.py) -grpcio-tools==1.48.0 
+grpcio-tools==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -329,7 +327,7 @@ jsonpatch==1.32 # via great-expectations jsonpointer==2.3 # via jsonpatch -jsonschema==4.8.0 +jsonschema==4.9.0 # via # altair # feast (setup.py) @@ -346,7 +344,7 @@ markupsafe==2.1.1 # werkzeug matplotlib-inline==0.1.3 # via ipython -mccabe==0.6.1 +mccabe==0.7.0 # via flake8 minio==7.1.0 # via feast (setup.py) @@ -438,7 +436,7 @@ pathspec==0.9.0 # via black pbr==5.9.0 # via mock -pep517==0.12.0 +pep517==0.13.0 # via build pexpect==4.8.0 # via ipython @@ -513,7 +511,7 @@ pyasn1-modules==0.2.8 # via google-auth pybindgen==0.22.1 # via feast (setup.py) -pycodestyle==2.8.0 +pycodestyle==2.9.0 # via flake8 pycparser==2.21 # via cffi @@ -523,7 +521,7 @@ pydantic==1.9.1 # via # fastapi # feast (setup.py) -pyflakes==2.4.0 +pyflakes==2.5.0 # via flake8 pygments==2.12.0 # via @@ -630,15 +628,15 @@ responses==0.21.0 # via moto rsa==4.9 # via google-auth -ruamel.yaml==0.17.17 +ruamel-yaml==0.17.17 # via great-expectations -ruamel.yaml.clib==0.2.6 - # via ruamel.yaml +ruamel-yaml-clib==0.2.6 + # via ruamel-yaml s3fs==2022.1.0 # via feast (setup.py) s3transfer==0.5.2 # via boto3 -scipy==1.8.1 +scipy==1.9.0 # via great-expectations six==1.16.0 # via @@ -737,15 +735,15 @@ types-pytz==2022.1.2 # via feast (setup.py) types-pyyaml==6.0.11 # via feast (setup.py) -types-redis==4.3.11 +types-redis==4.3.13 # via feast (setup.py) -types-requests==2.28.5 +types-requests==2.28.6 # via feast (setup.py) types-setuptools==63.2.2 # via feast (setup.py) types-tabulate==0.8.11 # via feast (setup.py) -types-urllib3==1.26.17 +types-urllib3==1.26.20 # via types-requests typing-extensions==4.3.0 # via @@ -798,7 +796,7 @@ wrapt==1.14.1 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.7.2 +yarl==1.8.0 # via aiohttp zipp==3.8.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 593e080725..1ef60c531a 100644 --- 
a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -40,7 +40,7 @@ dill==0.3.5.1 # via feast (setup.py) fastapi==0.79.0 # via feast (setup.py) -fastavro==1.5.3 +fastavro==1.5.4 # via # feast (setup.py) # pandavro @@ -57,13 +57,11 @@ googleapis-common-protos==1.56.4 # feast (setup.py) # google-api-core # tensorflow-metadata -greenlet==1.1.2 - # via sqlalchemy -grpcio==1.48.0 +grpcio==1.47.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.48.0 +grpcio-reflection==1.47.0 # via feast (setup.py) h11==0.13.0 # via uvicorn @@ -75,7 +73,7 @@ idna==3.3 # requests jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.8.0 +jsonschema==4.9.0 # via feast (setup.py) locket==1.0.0 # via partd From 31be8e8965444ce52c16542bf3983093514d0abb Mon Sep 17 00:00:00 2001 From: Danny Chiao Date: Mon, 1 Aug 2022 23:03:21 -0500 Subject: [PATCH 71/73] chore: Update main README including template (#2994) Signed-off-by: Danny Chiao --- README.md | 9 ++++++++- infra/templates/README.md.jinja2 | 9 ++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index df42737811..ab69636a20 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,14 @@ ## Overview -Feast is an open source feature store for machine learning. Feast is the fastest path to productionizing analytic data for model training and online inference. +Feast (**Fea**ture **St**ore) is an open source feature store for machine learning. Feast is the fastest path to manage existing infrastructure to productionize analytic data for model training and online inference. + + +Feast allows ML platform teams to: + +* **Make features consistently available for training and serving** by managing an _offline store_ (to process historical data for scale-out batch scoring or model training), a low-latency _online store_ (to power real-time prediction)_,_ and a battle-tested _feature server_ (for serving pre-computed features online). 
+* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensure that future feature values do not leak to models during training. +* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another. Please see our [documentation](https://docs.feast.dev/) for more information about the project. diff --git a/infra/templates/README.md.jinja2 b/infra/templates/README.md.jinja2 index cd6e42c1d1..6a8ebdbab7 100644 --- a/infra/templates/README.md.jinja2 +++ b/infra/templates/README.md.jinja2 @@ -16,7 +16,14 @@ ## Overview -Feast is an open source feature store for machine learning. Feast is the fastest path to productionizing analytic data for model training and online inference. +Feast (**Fea**ture **St**ore) is an open source feature store for machine learning. Feast is the fastest path to manage existing infrastructure to productionize analytic data for model training and online inference. + + +Feast allows ML platform teams to: + +* **Make features consistently available for training and serving** by managing an _offline store_ (to process historical data for scale-out batch scoring or model training), a low-latency _online store_ (to power real-time prediction)_,_ and a battle-tested _feature server_ (for serving pre-computed features online). +* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensure that future feature values do not leak to models during training. 
+* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another. Please see our [documentation](https://docs.feast.dev/) for more information about the project. From 41070cb8339b0971574375ef418301fe4d3ad94e Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Tue, 2 Aug 2022 12:17:32 -0700 Subject: [PATCH 72/73] docs: Add docs for repo-upgrade and update architecture stuff (#2989) * docs: Add docs for repo-upgrade and update architecture stuff Signed-off-by: Achal Shah * describe Signed-off-by: Achal Shah * Fixes Signed-off-by: Felix Wang * Update docs Signed-off-by: Felix Wang Co-authored-by: Felix Wang --- docs/SUMMARY.md | 2 +- .../batch-materialization-engine.md | 2 +- .../architecture-and-components/overview.md | 5 +- .../stream-processor.md | 8 ++ docs/how-to-guides/automated-feast-upgrade.md | 78 +++++++++++++++++++ 5 files changed, 92 insertions(+), 3 deletions(-) create mode 100644 docs/getting-started/architecture-and-components/stream-processor.md create mode 100644 docs/how-to-guides/automated-feast-upgrade.md diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 88691d82f9..b0e88b413f 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -52,7 +52,7 @@ * [Read features from the online store](how-to-guides/feast-snowflake-gcp-aws/read-features-from-the-online-store.md) * [Running Feast in production](how-to-guides/running-feast-in-production.md) * [Upgrading from Feast 0.9](https://docs.google.com/document/u/1/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0/edit) -* [Adding a custom provider](how-to-guides/creating-a-custom-provider.md) +* [Upgrading for Feast 0.20+](how-to-guides/automated-feast-upgrade.md) * [Adding a custom batch materialization engine](how-to-guides/creating-a-custom-materialization-engine.md) * [Adding 
a new online store](how-to-guides/adding-support-for-a-new-online-store.md) * [Adding a new offline store](how-to-guides/adding-a-new-offline-store.md) diff --git a/docs/getting-started/architecture-and-components/batch-materialization-engine.md b/docs/getting-started/architecture-and-components/batch-materialization-engine.md index da21bd4c59..fb3c83ccb4 100644 --- a/docs/getting-started/architecture-and-components/batch-materialization-engine.md +++ b/docs/getting-started/architecture-and-components/batch-materialization-engine.md @@ -6,5 +6,5 @@ A materialization engine abstracts over specific technologies or frameworks that If the built-in engines are not sufficient, you can create your own custom materialization engine. Please see [this guide](../../how-to-guides/creating-a-custom-materialization-engine.md) for more details. -Please see [feature\_store.yaml](../../reference/feature-repository/feature-store-yaml.md#overview) for configuring providers. +Please see [feature\_store.yaml](../../reference/feature-repository/feature-store-yaml.md#overview) for configuring engines. diff --git a/docs/getting-started/architecture-and-components/overview.md b/docs/getting-started/architecture-and-components/overview.md index 0c47fb2753..97bd779503 100644 --- a/docs/getting-started/architecture-and-components/overview.md +++ b/docs/getting-started/architecture-and-components/overview.md @@ -5,6 +5,7 @@ ## Functionality * **Create Batch Features:** ELT/ETL systems like Spark and SQL are used to transform data in the batch store. +* **Create Stream Features:** Stream features are created from streaming services such as Kafka or Kinesis, and can be pushed directly into Feast. * **Feast Apply:** The user (or CI) publishes versioned controlled feature definitions using `feast apply`. This CLI command updates infrastructure and persists definitions in the object store registry. 
* **Feast Materialize:** The user (or scheduler) executes `feast materialize` which loads features from the offline store into the online store. * **Model Training:** A model training pipeline is launched. It uses the Feast Python SDK to retrieve a training dataset and trains a model. @@ -23,8 +24,10 @@ A complete Feast deployment contains the following components: * Materialize (load) feature values into the online store. * Build and retrieve training datasets from the offline store. * Retrieve online features. +* **Stream Processor:** The Stream Processor can be used to ingest feature data from streams and write it into the online or offline stores. Currently, there's an experimental Spark processor that's able to consume data from Kafka. +* **Batch Materialization Engine:** The [Batch Materialization Engine](batch-materialization-engine.md) component launches a process which loads data into the online store from the offline store. By default, Feast uses a local in-process engine implementation to materialize data. However, additional infrastructure can be used for a more scalable materialization process. * **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is populated by materialization jobs and from [stream ingestion](../../reference/data-sources/push.md). -* **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. Feast does not manage the offline store directly, but runs queries against it. +* **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. For feature retrieval and materialization, Feast does not manage the offline store directly, but runs queries against it. 
However, offline stores can be configured to write data to the offline store if Feast is configured to log served features and the offline store supports this functionality. {% hint style="info" %} Java and Go Clients are also available for online feature retrieval. diff --git a/docs/getting-started/architecture-and-components/stream-processor.md b/docs/getting-started/architecture-and-components/stream-processor.md new file mode 100644 index 0000000000..13b6e5b304 --- /dev/null +++ b/docs/getting-started/architecture-and-components/stream-processor.md @@ -0,0 +1,8 @@ +# Stream Processor + +A Stream Processor is responsible for consuming data from stream sources (such as Kafka, Kinesis, etc.) and loading it directly into the online (and optionally the offline store). + +A Stream Processor abstracts over specific technologies or frameworks that are used to materialize data. An experimental Spark Processor for Kafka is available in Feast. + +If the built-in processor is not sufficient, you can create your own custom processor. Please see [this tutorial](../../tutorials/building-streaming-features.md) for more details. + diff --git a/docs/how-to-guides/automated-feast-upgrade.md b/docs/how-to-guides/automated-feast-upgrade.md new file mode 100644 index 0000000000..ff17748537 --- /dev/null +++ b/docs/how-to-guides/automated-feast-upgrade.md @@ -0,0 +1,78 @@ +# Automated upgrades for Feast 0.20+ + +## Overview + +Starting with Feast 0.20, the APIs of many core objects (e.g. feature views and entities) have been changed. +For example, many parameters have been renamed. +These changes were made in a backwards-compatible fashion; existing Feast repositories will continue to work until Feast 0.23, without any changes required. +However, Feast 0.24 will fully deprecate all of the old parameters, so in order to use Feast 0.24+ users must modify their Feast repositories. + +There are currently deprecation warnings that indicate to users exactly how to modify their repos. 
+In order to make the process somewhat easier, Feast 0.23 also introduces a new CLI command, `repo-upgrade`, that will partially automate the process of upgrading Feast repositories. + +The upgrade command aims to automatically modify the object definitions in a feature repo to match the API required by Feast 0.24+. When running the command, the Feast CLI analyzes the source code in the feature repo files using [bowler](https://pybowler.io/), and attempted to rewrite the files in a best-effort way. It's possible for there to be parts of the API that are not upgraded automatically. + +The `repo-upgrade` command is specifically meant for upgrading Feast repositories that were initially created in versions 0.23 and below to be compatible with versions 0.24 and above. +It is not intended to work for any future upgrades. + +## Usage + +At the root of a feature repo, you can run `feast repo-upgrade`. By default, the CLI only echos the changes it's planning on making, and does not modify any files in place. If the changes look reasonably, you can specify the `--write` flag to have the changes be written out to disk. + +An example: +```bash +$ feast repo-upgrade --write +--- /Users/achal/feast/prompt_dory/example.py ++++ /Users/achal/feast/prompt_dory/example.py +@@ -13,7 +13,6 @@ + path="/Users/achal/feast/prompt_dory/data/driver_stats.parquet", + event_timestamp_column="event_timestamp", + created_timestamp_column="created", +- date_partition_column="created" + ) + + # Define an entity for the driver. 
You can think of entity as a primary key used to +--- /Users/achal/feast/prompt_dory/example.py ++++ /Users/achal/feast/prompt_dory/example.py +@@ -3,7 +3,7 @@ + from google.protobuf.duration_pb2 import Duration + import pandas as pd + +-from feast import Entity, Feature, FeatureView, FileSource, ValueType, FeatureService, OnDemandFeatureView ++from feast import Entity, FeatureView, FileSource, ValueType, FeatureService, OnDemandFeatureView + + # Read data from parquet files. Parquet is convenient for local development mode. For + # production, you can use your favorite DWH, such as BigQuery. See Feast documentation +--- /Users/achal/feast/prompt_dory/example.py ++++ /Users/achal/feast/prompt_dory/example.py +@@ -4,6 +4,7 @@ + import pandas as pd + + from feast import Entity, Feature, FeatureView, FileSource, ValueType, FeatureService, OnDemandFeatureView ++from feast import Field + + # Read data from parquet files. Parquet is convenient for local development mode. For + # production, you can use your favorite DWH, such as BigQuery. See Feast documentation +--- /Users/achal/feast/prompt_dory/example.py ++++ /Users/achal/feast/prompt_dory/example.py +@@ -28,9 +29,9 @@ + entities=["driver_id"], + ttl=Duration(seconds=86400 * 365), + features=[ +- Feature(name="conv_rate", dtype=ValueType.FLOAT), +- Feature(name="acc_rate", dtype=ValueType.FLOAT), +- Feature(name="avg_daily_trips", dtype=ValueType.INT64), ++ Field(name="conv_rate", dtype=ValueType.FLOAT), ++ Field(name="acc_rate", dtype=ValueType.FLOAT), ++ Field(name="avg_daily_trips", dtype=ValueType.INT64), + ], + online=True, + batch_source=driver_hourly_stats, +``` +--- +To write these changes out, you can run the same command with the `--write` flag: +```bash +$ feast repo-upgrade --write +``` + +You should see the same output, but also see the changes reflected in your feature repo on disk. 
\ No newline at end of file From 48a82754444004185edbc19b58c7a36ac52f477e Mon Sep 17 00:00:00 2001 From: feast-ci-bot Date: Tue, 2 Aug 2022 20:20:09 +0000 Subject: [PATCH 73/73] chore(release): release 0.23.0 # [0.23.0](https://github.com/feast-dev/feast/compare/v0.22.0...v0.23.0) (2022-08-02) ### Bug Fixes * Add dummy alias to pull_all_from_table_or_query ([#2956](https://github.com/feast-dev/feast/issues/2956)) ([5e45228](https://github.com/feast-dev/feast/commit/5e45228a406e6ee7f82e41cab7f734730ff2e73f)) * Bump version of Guava to mitigate cve ([#2896](https://github.com/feast-dev/feast/issues/2896)) ([51df8be](https://github.com/feast-dev/feast/commit/51df8be5d3b9bc702393d00e9a6370c703510358)) * Change numpy version on setup.py and upgrade it to resolve dependabot warning ([#2887](https://github.com/feast-dev/feast/issues/2887)) ([80ea7a9](https://github.com/feast-dev/feast/commit/80ea7a93a9d7ea19f9a1218430e008a33eb6d788)) * Change the feature store plan method to public modifier ([#2904](https://github.com/feast-dev/feast/issues/2904)) ([0ec7d1a](https://github.com/feast-dev/feast/commit/0ec7d1abd3f509e17870ca168ece356382fb7fe9)) * Deprecate 3.7 wheels and fix verification workflow ([#2934](https://github.com/feast-dev/feast/issues/2934)) ([040c910](https://github.com/feast-dev/feast/commit/040c9107b719a7b3f3c70ab743f148e47b0a0982)) * Do not allow same column to be reused in data sources ([#2965](https://github.com/feast-dev/feast/issues/2965)) ([661c053](https://github.com/feast-dev/feast/commit/661c0535f34b042846562a3fb4cdab4ab4403459)) * Fix build wheels workflow to install apache-arrow correctly ([#2932](https://github.com/feast-dev/feast/issues/2932)) ([bdeb4ae](https://github.com/feast-dev/feast/commit/bdeb4aeaf2a5cfa144a65cc84f7bfb26e3077e7a)) * Fix file offline store logic for feature views without ttl ([#2971](https://github.com/feast-dev/feast/issues/2971)) 
([26f6b69](https://github.com/feast-dev/feast/commit/26f6b69b0e2c8a4ea37b43e3d1eaa4cdb8c085a9)) * Fix grpc and update protobuf ([#2894](https://github.com/feast-dev/feast/issues/2894)) ([86e9efd](https://github.com/feast-dev/feast/commit/86e9efdc893de817a359feb939f06717716c0b17)) * Fix night ci syntax error and update readme ([#2935](https://github.com/feast-dev/feast/issues/2935)) ([b917540](https://github.com/feast-dev/feast/commit/b917540c27052c01f872a2de686a6dd3b7a16e9c)) * Fix nightly ci again ([#2939](https://github.com/feast-dev/feast/issues/2939)) ([1603c9e](https://github.com/feast-dev/feast/commit/1603c9e7765e08bb1832c03b66b754afbf8a9b4d)) * Fix the go build and use CgoArrowAllocator to prevent incorrect garbage collection ([#2919](https://github.com/feast-dev/feast/issues/2919)) ([130746e](https://github.com/feast-dev/feast/commit/130746ea5cfadad6ef467c0cb0490d4745fdad70)) * Fix typo in CONTRIBUTING.md ([#2955](https://github.com/feast-dev/feast/issues/2955)) ([8534f69](https://github.com/feast-dev/feast/commit/8534f69026d03e6e5964ef3e9bc69cc18397a879)) * Fixing broken links to feast documentation on java readme and contribution ([#2892](https://github.com/feast-dev/feast/issues/2892)) ([d044588](https://github.com/feast-dev/feast/commit/d044588d702b3dc2dd6b9a9e28056df19d942a09)) * Fixing Spark min / max entity df event timestamps range return order ([#2735](https://github.com/feast-dev/feast/issues/2735)) ([ac55ce2](https://github.com/feast-dev/feast/commit/ac55ce25388abfa35e93097bd14190eeba08a165)) * Move gcp back to 1.47.0 since grpcio-tools 1.48.0 got yanked from pypi ([#2990](https://github.com/feast-dev/feast/issues/2990)) ([fc447eb](https://github.com/feast-dev/feast/commit/fc447eb3d0345dba6a45cdf5b1c1c2e982766cb9)) * Refactor testing and sort out unit and integration tests ([#2975](https://github.com/feast-dev/feast/issues/2975)) ([2680f7b](https://github.com/feast-dev/feast/commit/2680f7b031717b64e6ea3addf150369dccebdbc1)) * Remove hard-coded 
integration test setup for AWS & GCP ([#2970](https://github.com/feast-dev/feast/issues/2970)) ([e4507ac](https://github.com/feast-dev/feast/commit/e4507ac16540cb3a7e29c31121963a0fe8f79fe4)) * Resolve small typo in README file ([#2930](https://github.com/feast-dev/feast/issues/2930)) ([16ae902](https://github.com/feast-dev/feast/commit/16ae902909911bbf45d0e430895b3bc20bba01e9)) * Revert "feat: Add snowflake online store ([#2902](https://github.com/feast-dev/feast/issues/2902))" ([#2909](https://github.com/feast-dev/feast/issues/2909)) ([38fd001](https://github.com/feast-dev/feast/commit/38fd00195f8ed309b2e7bae06d48cb10ab82f5aa)) * Snowflake_online_read fix ([#2988](https://github.com/feast-dev/feast/issues/2988)) ([651ce34](https://github.com/feast-dev/feast/commit/651ce341687034ce07ca959f805f3c90dccfd4cc)) * Spark source support table with pattern "db.table" ([#2606](https://github.com/feast-dev/feast/issues/2606)) ([3ce5139](https://github.com/feast-dev/feast/commit/3ce51391e0b2ebdec68c81d93b54f5d06bb427a6)), closes [#2605](https://github.com/feast-dev/feast/issues/2605) * Switch mysql log string to use regex ([#2976](https://github.com/feast-dev/feast/issues/2976)) ([5edf4b0](https://github.com/feast-dev/feast/commit/5edf4b0332a298a0e172dd58e0a627efe5705eec)) * Update gopy to point to fork to resolve github annotation errors. 
([#2940](https://github.com/feast-dev/feast/issues/2940)) ([ba2dcf1](https://github.com/feast-dev/feast/commit/ba2dcf13fe9dc4c082816a737100e00e3e9a8ad2)) * Version entity serialization mechanism and fix issue with int64 vals ([#2944](https://github.com/feast-dev/feast/issues/2944)) ([d0d27a3](https://github.com/feast-dev/feast/commit/d0d27a35a0d63a139970cb17542764ff2aaf6aaf)) ### Features * Add an experimental lambda-based materialization engine ([#2923](https://github.com/feast-dev/feast/issues/2923)) ([6f79069](https://github.com/feast-dev/feast/commit/6f79069c561eba888d070c46aae920f7ad0c2319)) * Add column reordering to `write_to_offline_store` ([#2876](https://github.com/feast-dev/feast/issues/2876)) ([8abc2ef](https://github.com/feast-dev/feast/commit/8abc2ef76d461b6b4bbd97e2dfdf29c1c335cb80)) * Add custom JSON table tab w/ formatting ([#2851](https://github.com/feast-dev/feast/issues/2851)) ([0159f38](https://github.com/feast-dev/feast/commit/0159f3875de7c8509c465346bd13dd11fba0d467)) * Add CustomSourceOptions to SavedDatasetStorage ([#2958](https://github.com/feast-dev/feast/issues/2958)) ([23c09c8](https://github.com/feast-dev/feast/commit/23c09c83bc530de830ba867b10ceb02f113db5d6)) * Add Go option to `feast serve` command ([#2966](https://github.com/feast-dev/feast/issues/2966)) ([a36a695](https://github.com/feast-dev/feast/commit/a36a6950b34d718ad328b4faca0c178fb23a3100)) * Add interfaces for batch materialization engine ([#2901](https://github.com/feast-dev/feast/issues/2901)) ([38b28ca](https://github.com/feast-dev/feast/commit/38b28ca0181610c65d966a2f09456dbb102fbced)) * Add pages for individual Features to the Feast UI ([#2850](https://github.com/feast-dev/feast/issues/2850)) ([9b97fca](https://github.com/feast-dev/feast/commit/9b97fca876d9520d6e1f9025562036330cc0aabd)) * Add snowflake online store ([#2902](https://github.com/feast-dev/feast/issues/2902)) ([f758f9e](https://github.com/feast-dev/feast/commit/f758f9e148212d08f63df155e864940c27d92155)), 
closes [#2903](https://github.com/feast-dev/feast/issues/2903) * Add Snowflake online store (again) ([#2922](https://github.com/feast-dev/feast/issues/2922)) ([2ef71fc](https://github.com/feast-dev/feast/commit/2ef71fc6b3ec4fca3b543f2f64bed765b09c3af4)), closes [#2903](https://github.com/feast-dev/feast/issues/2903) * Add to_remote_storage method to RetrievalJob ([#2916](https://github.com/feast-dev/feast/issues/2916)) ([109ee9c](https://github.com/feast-dev/feast/commit/109ee9cff5bcda46889583f2968003f6a3e375b3)) * Support retrieval from multiple feature views with different join keys ([#2835](https://github.com/feast-dev/feast/issues/2835)) ([056cfa1](https://github.com/feast-dev/feast/commit/056cfa1b21db4ff092b9d1f9c06f7300a4c9f4b7)) --- CHANGELOG.md | 46 +++++++++++++++++++ infra/charts/feast-python-server/Chart.yaml | 2 +- infra/charts/feast-python-server/README.md | 2 +- infra/charts/feast/Chart.yaml | 2 +- infra/charts/feast/README.md | 6 +-- .../feast/charts/feature-server/Chart.yaml | 4 +- .../feast/charts/feature-server/README.md | 4 +- .../feast/charts/feature-server/values.yaml | 2 +- .../charts/transformation-service/Chart.yaml | 4 +- .../charts/transformation-service/README.md | 4 +- .../charts/transformation-service/values.yaml | 2 +- infra/charts/feast/requirements.yaml | 4 +- java/pom.xml | 2 +- 13 files changed, 65 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bd7e8098f3..80852af83d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,51 @@ # Changelog +# [0.23.0](https://github.com/feast-dev/feast/compare/v0.22.0...v0.23.0) (2022-08-02) + + +### Bug Fixes + +* Add dummy alias to pull_all_from_table_or_query ([#2956](https://github.com/feast-dev/feast/issues/2956)) ([5e45228](https://github.com/feast-dev/feast/commit/5e45228a406e6ee7f82e41cab7f734730ff2e73f)) +* Bump version of Guava to mitigate cve ([#2896](https://github.com/feast-dev/feast/issues/2896)) 
([51df8be](https://github.com/feast-dev/feast/commit/51df8be5d3b9bc702393d00e9a6370c703510358)) +* Change numpy version on setup.py and upgrade it to resolve dependabot warning ([#2887](https://github.com/feast-dev/feast/issues/2887)) ([80ea7a9](https://github.com/feast-dev/feast/commit/80ea7a93a9d7ea19f9a1218430e008a33eb6d788)) +* Change the feature store plan method to public modifier ([#2904](https://github.com/feast-dev/feast/issues/2904)) ([0ec7d1a](https://github.com/feast-dev/feast/commit/0ec7d1abd3f509e17870ca168ece356382fb7fe9)) +* Deprecate 3.7 wheels and fix verification workflow ([#2934](https://github.com/feast-dev/feast/issues/2934)) ([040c910](https://github.com/feast-dev/feast/commit/040c9107b719a7b3f3c70ab743f148e47b0a0982)) +* Do not allow same column to be reused in data sources ([#2965](https://github.com/feast-dev/feast/issues/2965)) ([661c053](https://github.com/feast-dev/feast/commit/661c0535f34b042846562a3fb4cdab4ab4403459)) +* Fix build wheels workflow to install apache-arrow correctly ([#2932](https://github.com/feast-dev/feast/issues/2932)) ([bdeb4ae](https://github.com/feast-dev/feast/commit/bdeb4aeaf2a5cfa144a65cc84f7bfb26e3077e7a)) +* Fix file offline store logic for feature views without ttl ([#2971](https://github.com/feast-dev/feast/issues/2971)) ([26f6b69](https://github.com/feast-dev/feast/commit/26f6b69b0e2c8a4ea37b43e3d1eaa4cdb8c085a9)) +* Fix grpc and update protobuf ([#2894](https://github.com/feast-dev/feast/issues/2894)) ([86e9efd](https://github.com/feast-dev/feast/commit/86e9efdc893de817a359feb939f06717716c0b17)) +* Fix night ci syntax error and update readme ([#2935](https://github.com/feast-dev/feast/issues/2935)) ([b917540](https://github.com/feast-dev/feast/commit/b917540c27052c01f872a2de686a6dd3b7a16e9c)) +* Fix nightly ci again ([#2939](https://github.com/feast-dev/feast/issues/2939)) ([1603c9e](https://github.com/feast-dev/feast/commit/1603c9e7765e08bb1832c03b66b754afbf8a9b4d)) +* Fix the go build and use 
CgoArrowAllocator to prevent incorrect garbage collection ([#2919](https://github.com/feast-dev/feast/issues/2919)) ([130746e](https://github.com/feast-dev/feast/commit/130746ea5cfadad6ef467c0cb0490d4745fdad70)) +* Fix typo in CONTRIBUTING.md ([#2955](https://github.com/feast-dev/feast/issues/2955)) ([8534f69](https://github.com/feast-dev/feast/commit/8534f69026d03e6e5964ef3e9bc69cc18397a879)) +* Fixing broken links to feast documentation on java readme and contribution ([#2892](https://github.com/feast-dev/feast/issues/2892)) ([d044588](https://github.com/feast-dev/feast/commit/d044588d702b3dc2dd6b9a9e28056df19d942a09)) +* Fixing Spark min / max entity df event timestamps range return order ([#2735](https://github.com/feast-dev/feast/issues/2735)) ([ac55ce2](https://github.com/feast-dev/feast/commit/ac55ce25388abfa35e93097bd14190eeba08a165)) +* Move gcp back to 1.47.0 since grpcio-tools 1.48.0 got yanked from pypi ([#2990](https://github.com/feast-dev/feast/issues/2990)) ([fc447eb](https://github.com/feast-dev/feast/commit/fc447eb3d0345dba6a45cdf5b1c1c2e982766cb9)) +* Refactor testing and sort out unit and integration tests ([#2975](https://github.com/feast-dev/feast/issues/2975)) ([2680f7b](https://github.com/feast-dev/feast/commit/2680f7b031717b64e6ea3addf150369dccebdbc1)) +* Remove hard-coded integration test setup for AWS & GCP ([#2970](https://github.com/feast-dev/feast/issues/2970)) ([e4507ac](https://github.com/feast-dev/feast/commit/e4507ac16540cb3a7e29c31121963a0fe8f79fe4)) +* Resolve small typo in README file ([#2930](https://github.com/feast-dev/feast/issues/2930)) ([16ae902](https://github.com/feast-dev/feast/commit/16ae902909911bbf45d0e430895b3bc20bba01e9)) +* Revert "feat: Add snowflake online store ([#2902](https://github.com/feast-dev/feast/issues/2902))" ([#2909](https://github.com/feast-dev/feast/issues/2909)) ([38fd001](https://github.com/feast-dev/feast/commit/38fd00195f8ed309b2e7bae06d48cb10ab82f5aa)) +* Snowflake_online_read fix 
([#2988](https://github.com/feast-dev/feast/issues/2988)) ([651ce34](https://github.com/feast-dev/feast/commit/651ce341687034ce07ca959f805f3c90dccfd4cc)) +* Spark source support table with pattern "db.table" ([#2606](https://github.com/feast-dev/feast/issues/2606)) ([3ce5139](https://github.com/feast-dev/feast/commit/3ce51391e0b2ebdec68c81d93b54f5d06bb427a6)), closes [#2605](https://github.com/feast-dev/feast/issues/2605) +* Switch mysql log string to use regex ([#2976](https://github.com/feast-dev/feast/issues/2976)) ([5edf4b0](https://github.com/feast-dev/feast/commit/5edf4b0332a298a0e172dd58e0a627efe5705eec)) +* Update gopy to point to fork to resolve github annotation errors. ([#2940](https://github.com/feast-dev/feast/issues/2940)) ([ba2dcf1](https://github.com/feast-dev/feast/commit/ba2dcf13fe9dc4c082816a737100e00e3e9a8ad2)) +* Version entity serialization mechanism and fix issue with int64 vals ([#2944](https://github.com/feast-dev/feast/issues/2944)) ([d0d27a3](https://github.com/feast-dev/feast/commit/d0d27a35a0d63a139970cb17542764ff2aaf6aaf)) + + +### Features + +* Add an experimental lambda-based materialization engine ([#2923](https://github.com/feast-dev/feast/issues/2923)) ([6f79069](https://github.com/feast-dev/feast/commit/6f79069c561eba888d070c46aae920f7ad0c2319)) +* Add column reordering to `write_to_offline_store` ([#2876](https://github.com/feast-dev/feast/issues/2876)) ([8abc2ef](https://github.com/feast-dev/feast/commit/8abc2ef76d461b6b4bbd97e2dfdf29c1c335cb80)) +* Add custom JSON table tab w/ formatting ([#2851](https://github.com/feast-dev/feast/issues/2851)) ([0159f38](https://github.com/feast-dev/feast/commit/0159f3875de7c8509c465346bd13dd11fba0d467)) +* Add CustomSourceOptions to SavedDatasetStorage ([#2958](https://github.com/feast-dev/feast/issues/2958)) ([23c09c8](https://github.com/feast-dev/feast/commit/23c09c83bc530de830ba867b10ceb02f113db5d6)) +* Add Go option to `feast serve` command 
([#2966](https://github.com/feast-dev/feast/issues/2966)) ([a36a695](https://github.com/feast-dev/feast/commit/a36a6950b34d718ad328b4faca0c178fb23a3100)) +* Add interfaces for batch materialization engine ([#2901](https://github.com/feast-dev/feast/issues/2901)) ([38b28ca](https://github.com/feast-dev/feast/commit/38b28ca0181610c65d966a2f09456dbb102fbced)) +* Add pages for individual Features to the Feast UI ([#2850](https://github.com/feast-dev/feast/issues/2850)) ([9b97fca](https://github.com/feast-dev/feast/commit/9b97fca876d9520d6e1f9025562036330cc0aabd)) +* Add snowflake online store ([#2902](https://github.com/feast-dev/feast/issues/2902)) ([f758f9e](https://github.com/feast-dev/feast/commit/f758f9e148212d08f63df155e864940c27d92155)), closes [#2903](https://github.com/feast-dev/feast/issues/2903) +* Add Snowflake online store (again) ([#2922](https://github.com/feast-dev/feast/issues/2922)) ([2ef71fc](https://github.com/feast-dev/feast/commit/2ef71fc6b3ec4fca3b543f2f64bed765b09c3af4)), closes [#2903](https://github.com/feast-dev/feast/issues/2903) +* Add to_remote_storage method to RetrievalJob ([#2916](https://github.com/feast-dev/feast/issues/2916)) ([109ee9c](https://github.com/feast-dev/feast/commit/109ee9cff5bcda46889583f2968003f6a3e375b3)) +* Support retrieval from multiple feature views with different join keys ([#2835](https://github.com/feast-dev/feast/issues/2835)) ([056cfa1](https://github.com/feast-dev/feast/commit/056cfa1b21db4ff092b9d1f9c06f7300a4c9f4b7)) + # [0.22.0](https://github.com/feast-dev/feast/compare/v0.21.0...v0.22.0) (2022-06-29) diff --git a/infra/charts/feast-python-server/Chart.yaml b/infra/charts/feast-python-server/Chart.yaml index 6c4751e3b7..6ab82b7a65 100644 --- a/infra/charts/feast-python-server/Chart.yaml +++ b/infra/charts/feast-python-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-python-server description: Feast Feature Server in Python type: application -version: 0.22.0 +version: 0.23.0 keywords: - machine 
learning - big data diff --git a/infra/charts/feast-python-server/README.md b/infra/charts/feast-python-server/README.md index 17ce5be682..e3da9b1d29 100644 --- a/infra/charts/feast-python-server/README.md +++ b/infra/charts/feast-python-server/README.md @@ -1,6 +1,6 @@ # feast-python-server -![Version: 0.22.0](https://img.shields.io/badge/Version-0.22.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) +![Version: 0.23.0](https://img.shields.io/badge/Version-0.23.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) Feast Feature Server in Python diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml index 012dc47de9..f4e33de7f3 100644 --- a/infra/charts/feast/Chart.yaml +++ b/infra/charts/feast/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v1 description: Feature store for machine learning name: feast -version: 0.22.0 +version: 0.23.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index 8b5e9718ef..f71dcf6124 100644 --- a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -8,7 +8,7 @@ This repo contains Helm charts for Feast components that are being installed on ## Chart: Feast -Feature store for machine learning Current chart version is `0.22.0` +Feature store for machine learning Current chart version is `0.23.0` ## Installation @@ -55,8 +55,8 @@ For more details, please see: https://docs.feast.dev/how-to-guides/running-feast | Repository | Name | Version | |------------|------|---------| | https://charts.helm.sh/stable | redis | 10.5.6 | -| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.22.0 | -| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.22.0 | +| https://feast-helm-charts.storage.googleapis.com | 
feature-server(feature-server) | 0.23.0 | +| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.23.0 | ## Values diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml index b88ad599b4..ee08b0b0f8 100644 --- a/infra/charts/feast/charts/feature-server/Chart.yaml +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Feast Feature Server: Online feature serving service for Feast" name: feature-server -version: 0.22.0 -appVersion: v0.22.0 +version: 0.23.0 +appVersion: v0.23.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md index 28570b0fc6..4717cfff3a 100644 --- a/infra/charts/feast/charts/feature-server/README.md +++ b/infra/charts/feast/charts/feature-server/README.md @@ -1,6 +1,6 @@ # feature-server -![Version: 0.22.0](https://img.shields.io/badge/Version-0.22.0-informational?style=flat-square) ![AppVersion: v0.22.0](https://img.shields.io/badge/AppVersion-v0.22.0-informational?style=flat-square) +![Version: 0.23.0](https://img.shields.io/badge/Version-0.23.0-informational?style=flat-square) ![AppVersion: v0.23.0](https://img.shields.io/badge/AppVersion-v0.23.0-informational?style=flat-square) Feast Feature Server: Online feature serving service for Feast @@ -17,7 +17,7 @@ Feast Feature Server: Online feature serving service for Feast | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository | -| image.tag | string | `"0.22.0"` | Image tag | +| image.tag | string | `"0.23.0"` | Image tag | | ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | | ingress.grpc.auth.enabled | bool | `false` 
| Flag to enable auth | | ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml index df8367fede..011ce9dc33 100644 --- a/infra/charts/feast/charts/feature-server/values.yaml +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Feature Server repository repository: feastdev/feature-server-java # image.tag -- Image tag - tag: 0.22.0 + tag: 0.23.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml index 148e136acf..07055730c5 100644 --- a/infra/charts/feast/charts/transformation-service/Chart.yaml +++ b/infra/charts/feast/charts/transformation-service/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Transformation service: to compute on-demand features" name: transformation-service -version: 0.22.0 -appVersion: v0.22.0 +version: 0.23.0 +appVersion: v0.23.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md index 4cbc1048f6..9bc7a1e5d6 100644 --- a/infra/charts/feast/charts/transformation-service/README.md +++ b/infra/charts/feast/charts/transformation-service/README.md @@ -1,6 +1,6 @@ # transformation-service -![Version: 0.22.0](https://img.shields.io/badge/Version-0.22.0-informational?style=flat-square) ![AppVersion: v0.22.0](https://img.shields.io/badge/AppVersion-v0.22.0-informational?style=flat-square) +![Version: 0.23.0](https://img.shields.io/badge/Version-0.23.0-informational?style=flat-square) ![AppVersion: v0.23.0](https://img.shields.io/badge/AppVersion-v0.23.0-informational?style=flat-square) Transformation service: to compute on-demand features @@ -13,7 +13,7 @@ 
Transformation service: to compute on-demand features | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository | -| image.tag | string | `"0.22.0"` | Image tag | +| image.tag | string | `"0.23.0"` | Image tag | | nodeSelector | object | `{}` | Node labels for pod assignment | | podLabels | object | `{}` | Labels to be added to Feast Serving pods | | replicaCount | int | `1` | Number of pods that will be created | diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml index 1264ea4f7b..c1e506a476 100644 --- a/infra/charts/feast/charts/transformation-service/values.yaml +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Transformation Server repository repository: feastdev/feature-transformation-server # image.tag -- Image tag - tag: 0.22.0 + tag: 0.23.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index 0b69f295e7..c88fb7a4fa 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.22.0 + version: 0.23.0 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.22.0 + version: 0.23.0 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis diff --git a/java/pom.xml b/java/pom.xml index 475e87ff13..0bf92ee244 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -38,7 +38,7 @@ - 0.22.0 + 0.23.0 
https://github.com/feast-dev/feast UTF-8